forked from Ponysearch/Ponysearch

Merge branch 'searxng:master' into master

commit e01362fee2

21 changed files with 101 additions and 120 deletions
@@ -3,18 +3,23 @@
;; Per-Directory Local Variables:
;; https://www.gnu.org/software/emacs/manual/html_node/emacs/Directory-Variables.html
;;
;; .. hint::
;; For full fledge developer tools install emacs packages:
;;
;; If you get ``*** EPC Error ***`` (even after a jedi:install-server) in
;; your emacs session, mostly you have jedi-mode enabled but the python
;; environment is missed. The python environment has to be next to the
;; ``<repo>/.dir-locals.el`` in::
;; M-x package-install ...
;;
;; ./local/py3
;; magit gitconfig
;; nvm lsp-mode lsp-pyright lsp-eslint
;; pyvenv pylint pip-requirements
;; jinja2-mode
;; json-mode
;; company company-jedi company-quickhelp company-shell
;; realgud
;; sphinx-doc markdown-mode graphviz-dot-mode
;; apache-mode nginx-mode
;;
;; To setup such an environment, build target::
;; To setup a developer environment, build target::
;;
;; $ make pyenv.install
;; $ make node.env.dev pyenv.install
;;
;; Some buffer locals are referencing the project environment:
;;
@@ -29,26 +34,11 @@
;; (setq python-shell-virtualenv-root "/path/to/env/")
;; - python-shell-interpreter --> <repo>/local/py3/bin/python
;;
;; Python development:
;;
;; Jedi, flycheck & other python stuff should use the 'python-shell-interpreter'
;; from the local py3 environment.
;;
;; For pyright support you need to install::
;;
;; M-x package-install lsp-pyright
;;
;; Other useful jedi stuff you might add to your ~/.emacs::
;;
;; (global-set-key [f6] 'flycheck-mode)
;; (add-hook 'python-mode-hook 'my:python-mode-hook)
;;
;; (defun my:python-mode-hook ()
;; (add-to-list 'company-backends 'company-jedi)
;; (require 'jedi-core)
;; (jedi:setup)
;; (define-key python-mode-map (kbd "C-c C-d") 'jedi:show-doc)
;; (define-key python-mode-map (kbd "M-.") 'jedi:goto-definition)
;; (define-key python-mode-map (kbd "M-,") 'jedi:goto-definition-pop-marker)
;; )

((nil
 . ((fill-column . 80)
@@ -67,6 +57,12 @@
     ;; to get in use of NVM environment, install https://github.com/rejeep/nvm.el
     (setq-local nvm-dir (expand-file-name "./.nvm" prj-root))

     ;; use nodejs from the (local) NVM environment (see nvm-dir)
     (nvm-use-for-buffer)
     (ignore-errors (require 'lsp))
     (setq-local lsp-server-install-dir (car (cdr nvm-current-version)))
     (setq-local lsp-enable-file-watchers nil)

     ;; use 'py3' environment as default
     (setq-local python-environment-default-root-name
                 "py3")
@@ -100,22 +96,22 @@

 (js-mode
  . ((eval . (progn
               ;; use nodejs from the (local) NVM environment (see nvm-dir)
               (nvm-use-for-buffer)
               (ignore-errors (require 'lsp-eslint))
               (setq-local js-indent-level 2)
               ;; flycheck should use the eslint checker from developer tools
               (setq-local flycheck-javascript-eslint-executable
                           (expand-file-name "node_modules/.bin/eslint" prj-root))
               ;; (flycheck-mode)

               (flycheck-mode)
               (if (featurep 'lsp-eslint)
                   (lsp))
               ))))

 (python-mode
  . ((eval . (progn
               ;; use nodejs from the (local) NVM environment (see nvm-dir)
               (nvm-use-for-buffer)
               (if (featurep 'lsp-pyright)
                   (lsp))
               (ignore-errors (require 'jedi-core))
               (ignore-errors (require 'lsp-pyright))
               (ignore-errors (sphinx-doc-mode))
               (setq-local python-environment-virtualenv
                           (list (expand-file-name "bin/virtualenv" python-shell-virtualenv-root)
                                 ;;"--system-site-packages"
@@ -124,6 +120,9 @@
               (setq-local pylint-command
                           (expand-file-name "bin/pylint" python-shell-virtualenv-root))

               (if (featurep 'lsp-pyright)
                   (lsp))

               ;; pylint will find the '.pylintrc' file next to the CWD
               ;; https://pylint.readthedocs.io/en/latest/user_guide/run.html#command-line-options
               (setq-local flycheck-pylintrc
@@ -404,4 +404,4 @@ known-third-party=enchant

# Exceptions that will emit a warning when being caught. Defaults to
# "Exception"
overgeneral-exceptions=Exception
overgeneral-exceptions=builtins.Exception
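A minimal sketch (hypothetical, not part of this commit) of the kind of code the overgeneral-exceptions option governs; newer pylint releases expect the fully qualified name builtins.Exception here, and the message name broad-exception-caught is assumed from pylint 2.16:

# Hypothetical example: catching an exception listed in overgeneral-exceptions
# makes pylint report the handler as too broad (broad-exception-caught).
def read_port(text):
    try:
        return int(text)
    except Exception:  # flagged, because "Exception" is in overgeneral-exceptions
        return None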
@@ -168,3 +168,4 @@ features or generally made searx better:
- Ahmad Alkadri `<https://github.com/ahmad-alkadri>`_
- Milad Laly @Milad-Laly
- @llmII
- @blob42 `<https://blob42.xyz>`_
Makefile

@@ -80,7 +80,7 @@ MANAGE += data.all data.languages data.useragents data.osm_keys_tags
MANAGE += docs.html docs.live docs.gh-pages docs.prebuild docs.clean
MANAGE += docker.build docker.push docker.buildx
MANAGE += gecko.driver
MANAGE += node.env node.clean
MANAGE += node.env node.env.dev node.clean
MANAGE += py.build py.clean
MANAGE += pyenv pyenv.install pyenv.uninstall
MANAGE += pypi.upload pypi.upload.test
@@ -657,8 +657,9 @@ and can relied on the default configuration :origin:`searx/settings.yml` using:
``engines:``
  With ``use_default_settings: true``, each settings can be override in a
  similar way, the ``engines`` section is merged according to the engine
  ``name``. In this example, SearXNG will load all the engine and the arch linux
  wiki engine has a :ref:`token <private engines>`:
  ``name``. In this example, SearXNG will load all the default engines, will
  enable the ``bing`` engine and define a :ref:`token <private engines>` for
  the arch linux engine:

  .. code-block:: yaml

@@ -668,6 +669,9 @@ and can relied on the default configuration :origin:`searx/settings.yml` using:
     engines:
       - name: arch linux wiki
         tokens: ['$ecretValue']
       - name: bing
         disabled: false


``engines:`` / ``remove:``
  It is possible to remove some engines from the default settings. The following
manage

@@ -97,7 +97,8 @@ redis:
  install : create user (${REDIS_USER}) and install systemd service (${REDIS_SERVICE_NAME})
  help : show more redis commands
node.:
  env : download & install npm dependencies locally
  env : download & install SearXNG's npm dependencies locally
  env.dev : download & install developer and CI tools
  clean : drop locally npm installations
py.:
  build : Build python packages at ./${PYDIST}
@@ -563,16 +564,15 @@ nodejs.ensure() {
node.env() {
    nodejs.ensure
    ( set -e

        build_msg INSTALL "searx/static/themes/simple/package.json"
        build_msg INSTALL "./searx/static/themes/simple/package.json"
        npm --prefix searx/static/themes/simple install
    )
    dump_return $?
}

node.env.devtools() {
node.env.dev() {
    nodejs.ensure
    build_msg INSTALL "package.json: developer and CI tools"
    build_msg INSTALL "./package.json: developer and CI tools"
    npm install
}

@@ -585,6 +585,10 @@ node.clean() {
    ( set -e
        npm --prefix searx/static/themes/simple run clean
    )
    build_msg CLEAN "locally installed developer and CI tools"
    ( set -e
        npm --prefix . run clean
    )
    dump_return $?
}

@@ -702,7 +706,7 @@ test.pylint() {

test.pyright() {
    build_msg TEST "[pyright] static type check of python sources"
    node.env.devtools
    node.env.dev
    # We run Pyright in the virtual environment because Pyright
    # executes "python" to determine the Python version.
    build_msg TEST "[pyright] suppress warnings related to intentional monkey patching"
@@ -2,5 +2,8 @@
  "dependencies": {
    "eslint": "^8.18.0",
    "pyright": "^1.1.255"
  },
  "scripts": {
    "clean": "rm -Rf node_modules package-lock.json"
  }
}
@@ -2,7 +2,7 @@ mock==5.0.1
nose2[coverage_plugin]==0.12.0
cov-core==1.15.0
black==22.12.0
pylint==2.15.10
pylint==2.16.2
splinter==0.19.0
selenium==4.8.0
twine==4.0.2
@@ -1,7 +1,7 @@
certifi==2022.12.7
babel==2.11.0
flask-babel==3.0.1
flask==2.2.2
flask==2.2.3
jinja2==3.1.2
lxml==4.9.2
pygments==2.14.0

@@ -12,7 +12,7 @@ Brotli==1.0.9
uvloop==0.17.0
httpx-socks[asyncio]==0.7.2
setproctitle==1.3.2
redis==4.4.2
redis==4.5.1
markdown-it-py==2.1.0
typing_extensions==4.4.0
typing_extensions==4.5.0
fasttext-predict==0.9.2.1
@@ -3,6 +3,7 @@
"""This module implements functions needed for the autocompleter.

"""
# pylint: disable=use-dict-literal

from json import loads
from urllib.parse import urlencode
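The # pylint: disable=use-dict-literal line added here (and in the engine modules below) suppresses pylint's use-dict-literal refactoring hint. A minimal sketch, assuming the usual dict(...) call pattern, of what that check flags:

# Hypothetical illustration of pylint's use-dict-literal check:
params = dict(q='searxng', pageno=1)      # pylint suggests a dict literal instead
params = {'q': 'searxng', 'pageno': 1}    # equivalent form that the check prefers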
@@ -2,6 +2,7 @@
# lint: pylint
"""Semantic Scholar (Science)
"""
# pylint: disable=use-dict-literal

from urllib.parse import urlencode
from searx.utils import html_to_text
@@ -3,6 +3,7 @@
"""Docker Hub (IT)

"""
# pylint: disable=use-dict-literal

from json import loads
from urllib.parse import urlencode
@@ -3,7 +3,7 @@
"""
Gigablast (Web)
"""
# pylint: disable=invalid-name
# pylint: disable=invalid-name, use-dict-literal

import re
from time import time
@@ -1,6 +1,6 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
"""A plugin to check if the ip address of the request is a TOR exit node if the
"""A plugin to check if the ip address of the request is a Tor exit-node if the
user searches for ``tor-check``. It fetches the tor exit node list from
https://check.torproject.org/exit-addresses and parses all the IPs into a list,
then checks if the user's IP address is in it.
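The docstring above describes how the plugin works; a minimal standalone sketch of that fetch-and-check logic (the names, the example IP and the regular expression are illustrative assumptions, not taken from the plugin):

import re
import requests

# Hypothetical sketch: download the Tor exit-address list and check one IP against it.
resp = requests.get("https://check.torproject.org/exit-addresses", timeout=10)
node_list = re.findall(r"ExitAddress\s+(\S+)", resp.text)
print("203.0.113.7" in node_list)  # True only if the address is a known exit node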
|
@ -26,8 +26,8 @@ name = gettext("Tor check plugin")
|
|||
'''Translated name of the plugin'''
|
||||
|
||||
description = gettext(
|
||||
"This plugin checks if the address of the request is a TOR exit node, and"
|
||||
" informs the user if it is, like check.torproject.org but from searxng."
|
||||
"This plugin checks if the address of the request is a Tor exit-node, and"
|
||||
" informs the user if it is; like check.torproject.org, but from SearXNG."
|
||||
)
|
||||
'''Translated description of the plugin.'''
|
||||
|
||||
|
@@ -60,7 +60,7 @@ def post_search(request, search):
        # No answer, return error
        search.result_container.answers["tor"] = {
            "answer": gettext(
                "The TOR exit node list (https://check.torproject.org/exit-addresses) is unreachable."
                "Could not download the list of Tor exit-nodes from: https://check.torproject.org/exit-addresses"
            )
        }
        return True
@@ -75,13 +75,17 @@ def post_search(request, search):
    if ip_address in node_list:
        search.result_container.answers["tor"] = {
            "answer": gettext(
                "You are using TOR. Your IP address seems to be: {ip_address}.".format(ip_address=ip_address)
                "You are using Tor and it looks like you have this external IP address: {ip_address}".format(
                    ip_address=ip_address
                )
            )
        }
    else:
        search.result_container.answers["tor"] = {
            "answer": gettext(
                "You are not using TOR. Your IP address seems to be: {ip_address}.".format(ip_address=ip_address)
                "You are not using Tor and you have this external IP address: {ip_address}".format(
                    ip_address=ip_address
                )
            )
        }

@@ -4,6 +4,7 @@
"""Processores for engine-type: ``online``

"""
# pylint: disable=use-dict-literal

from timeit import default_timer
import asyncio
@@ -583,6 +583,7 @@ engines:
    engine: tineye
    shortcut: tin
    timeout: 9.0
    disabled: true

  - name: etymonline
    engine: xpath
@@ -1002,38 +1003,6 @@ engines:
    # collection: 'reviews' # name of the db collection
    # key: 'name' # key in the collection to search for

  - name: neeva
    engine: xpath
    shortcut: nv
    time_range_support: true
    time_range_url: '&alf%5Bfreshness%5D={time_range_val}'
    time_range_map:
      day: 'Day'
      week: 'Week'
      month: 'Month'
      year: 'Year'
    search_url: https://neeva.com/search?q={query}&c=All&src=Pagination&page={pageno}{time_range}
    results_xpath: //div[@class="web-index__component-2rKiM"] | //li[@class="web-rich-deep-links__deepLink-SIbD4"]
    url_xpath: .//a[@class="lib-doc-title__link-1b9rC"]/@href | ./h2/a/@href
    title_xpath: .//a[@class="lib-doc-title__link-1b9rC"] | ./h2/a
    content_xpath: >
      .//div[@class="lib-doc-snippet__component-3ewW6"]/text() |
      .//div[@class="lib-doc-snippet__component-3ewW6"]/*[not(self::a)] |
      ./p
    content_html_to_text: true
    suggestion_xpath: //span[@class="result-related-searches__link-2ho_u"]
    paging: true
    disabled: true
    categories: [general, web]
    timeout: 5.0
    soft_max_redirects: 2
    about:
      website: https://neeva.com
      official_api_documentation:
      use_official_api: false
      require_api_key: false
      results: HTML

  - name: npm
    engine: json_engine
    paging: true
@@ -1523,7 +1492,8 @@ engines:

  - name: wiby
    engine: json_engine
    search_url: https://wiby.me/json/?q={query}
    paging: true
    search_url: https://wiby.me/json/?q={query}&p={pageno}
    url_query: URL
    title_query: Title
    content_query: Snippet
@@ -1908,13 +1878,15 @@ engines:
    engine: xpath
    paging: true
    search_url: https://petalsearch.com/search?query={query}&pn={pageno}
    results_xpath: //div[@class="webpage-content"]/div[@class="title-cont"]/a
    url_xpath: ./@href
    title_xpath: .
    content_xpath: ../../div[@class="webpage-text"]
    suggestion_xpath: //div[@class="related-search-items"]/a
    url_xpath: //div[@class='card-source']
    title_xpath: //div[@class='title-name']
    content_xpath: //div[@class='webpage-text']
    first_page_num: 1
    disabled: true
    headers:
      User-Agent: Mozilla/5.0 (Linux; Android 7.0;) \
        AppleWebKit/537.36 (KHTML, like Gecko) \
        Mobile Safari/537.36 (compatible; PetalBot;+https://webmaster.petalsearch.com/site/petalbot)
    about:
      website: https://petalsearch.com/
      wikidata_id: Q104399280
@@ -1928,26 +1900,6 @@ engines:
    disabled: true
    timeout: 3.0

  - name: petalsearch news
    shortcut: ptsn
    categories: news
    engine: xpath
    paging: true
    search_url: https://petalsearch.com/search?channel=news&query={query}&pn={pageno}
    results_xpath: //div[@class="news-container"]/div/div/div/a
    url_xpath: ./@href
    title_xpath: ./div
    content_xpath: ../div[@class="news-text"]
    thumbnail_xpath: ../../../../img/@src
    first_page_num: 1
    disabled: true
    about:
      website: https://petalsearch.com/
      wikidata_id: Q104399280
      use_official_api: false
      require_api_key: false
      results: HTML

  - name: lib.rs
    shortcut: lrs
    engine: xpath
@@ -65,7 +65,7 @@

{% if suggestions %}
<div id="suggestions" role="complementary" aria-labelledby="suggestions-title">
<h4 class="title" id="suggestions-title">{{ _('Suggestions') }} : </h4>
<h4 class="title" id="suggestions-title">{{ _('Suggestions') }}: </h4>
<div class="wrapper">
{% for suggestion in suggestions %}
<form method="{{ method or 'POST' }}" action="{{ url_for('search') }}">
@@ -5,6 +5,8 @@
"""WebbApp

"""
# pylint: disable=use-dict-literal

import hashlib
import hmac
import json

@@ -815,7 +817,7 @@ def search():
        q=request.form['q'],
        selected_categories = search_query.categories,
        pageno = search_query.pageno,
        time_range = search_query.time_range,
        time_range = search_query.time_range or '',
        number_of_results = format_decimal(number_of_results),
        suggestions = suggestion_urls,
        answers = result_container.answers,
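The only functional change in the second hunk is the or '' fallback, so the template receives an empty string instead of None when no time range was selected. A small illustrative snippet (hypothetical, not from the code base) of the difference once the value is interpolated:

# Hypothetical illustration of the "or ''" fallback:
time_range = None
print(f"time_range={time_range}")        # -> time_range=None   (None rendered literally)
print(f"time_range={time_range or ''}")  # -> time_range=        (empty value)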
@@ -9,6 +9,7 @@ Output file: :origin:`searx/data/ahmia_blacklist.txt` (:origin:`CI Update data
.. _Ahmia's blacklist: https://ahmia.fi/blacklist/

"""
# pylint: disable=use-dict-literal

from os.path import join

@@ -21,6 +22,7 @@ URL = 'https://ahmia.fi/blacklist/banned/'
def fetch_ahmia_blacklist():
    resp = requests.get(URL, timeout=3.0)
    if resp.status_code != 200:
        # pylint: disable=broad-exception-raised
        raise Exception("Error fetching Ahmia blacklist, HTTP code " + resp.status_code)
    return resp.text.split()

@@ -1,19 +1,19 @@
#!/usr/bin/env python
# lint: pylint
# SPDX-License-Identifier: AGPL-3.0-or-later

"""Fetch firefox useragent signatures

Output file: :origin:`searx/data/useragents.json` (:origin:`CI Update data ...
<.github/workflows/data-update.yml>`).

"""
# pylint: disable=use-dict-literal

import json
import re
from os.path import join
from urllib.parse import urlparse, urljoin
from distutils.version import LooseVersion  # pylint: disable=deprecated-module
from packaging.version import parse

import requests
from lxml import html

@@ -40,6 +40,7 @@ useragents = {
def fetch_firefox_versions():
    resp = requests.get(URL, timeout=2.0)
    if resp.status_code != 200:
        # pylint: disable=broad-exception-raised
        raise Exception("Error fetching firefox versions, HTTP code " + resp.status_code)
    dom = html.fromstring(resp.text)
    versions = []

@@ -50,7 +51,7 @@ def fetch_firefox_versions():
        if path.startswith(RELEASE_PATH):
            version = path[len(RELEASE_PATH) : -1]
            if NORMAL_REGEX.match(version):
                versions.append(LooseVersion(version))
                versions.append(parse(version))

    list.sort(versions, reverse=True)
    return versions

@@ -60,11 +61,11 @@ def fetch_firefox_last_versions():
    versions = fetch_firefox_versions()

    result = []
    major_last = versions[0].version[0]
    major_last = versions[0].major
    major_list = (major_last, major_last - 1)
    for version in versions:
        major_current = version.version[0]
        minor_current = version.version[1]
        major_current = version.major
        minor_current = version.minor
        if major_current in major_list:
            user_agent_version = f'{major_current}.{minor_current}'
            if user_agent_version not in result:
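This script drops the deprecated distutils LooseVersion in favour of packaging.version.parse, whose result exposes major/minor attributes directly instead of a version list. A short sketch of the replacement API (the version strings are made up):

from packaging.version import parse

# Hypothetical example: parse() returns a Version object with named components.
v = parse("111.0.1")
print(v.major, v.minor)    # 111 0
print(v > parse("110.0"))  # True, versions compare numerically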
@@ -27,6 +27,7 @@ nvm.env() {
    source "${NVM_DIR}/nvm.sh"
    source "${NVM_DIR}/bash_completion"
    [ "$VERBOSE" = "1" ] && info_msg "sourced NVM environment from ${NVM_DIR}"
    return 0
}

nvm.is_installed() {

@@ -102,11 +103,15 @@ EOF
nvm.install() {
    local NVM_VERSION_TAG
    info_msg "install (update) NVM at ${NVM_DIR}"
    if [[ -d "${NVM_DIR}" ]] ; then
    if nvm.is_installed; then
        info_msg "already cloned at: ${NVM_DIR}"
        pushd "${NVM_DIR}" &> /dev/null
        git fetch --all | prefix_stdout " ${_Yellow}||${_creset} "
    else
        # delete any leftovers from previos installations
        if nvm.is_local; then
            rm -rf "${NVM_DIR}"
        fi
        info_msg "clone: ${NVM_GIT_URL}"
        git clone "${NVM_GIT_URL}" "${NVM_DIR}" 2>&1 | prefix_stdout " ${_Yellow}||${_creset} "
        pushd "${NVM_DIR}" &> /dev/null