21. NeoVim Configuration
$ mkdir ~/.config/nvim
$ touch ~/.config/nvim/init.vim
$ curl -fLo ~/.local/share/nvim/site/autoload/plug.vim --create-dirs https://gitee.com/c4pr1c3/vim-plug/raw/master/plug.vim
Add the following to init.vim:
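A minimal sketch of the plugin section (the exact plugin list is an assumption based on the plugins referenced later in this section):

```vim
" Plugins managed by vim-plug; the directory matches the ~/.vim/plugged path used below
call plug#begin('~/.vim/plugged')
Plug 'neoclide/coc.nvim'                                " completion framework (built from source, see the yarn note below)
Plug 'numirias/semshi', {'do': ':UpdateRemotePlugins'}  " Python semantic highlighting
call plug#end()
```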
Open an nvim window and run :PlugInstall. Once the plugins are installed, run :checkhealth.
If :checkhealth reports `[coc.nvim] build/index.js not found, please install dependencies and compile coc.nvim by: yarn install`, run `yarn install` inside ~/.vim/plugged/coc.nvim (install yarn first with `npm install -g yarn`). Note that if Hadoop was installed via sdkman, the `yarn` on PATH may be Hadoop's YARN rather than the Node.js package manager.
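A sketch of that fix (the plugin path is the one from this note; make sure `yarn` resolves to Node's package manager, not Hadoop's):

```bash
npm install -g yarn          # install yarn if it is not present yet
which yarn                   # check this is Node's yarn, not the Hadoop yarn from sdkman
cd ~/.vim/plugged/coc.nvim
yarn install                 # compiles coc.nvim and produces build/index.js
```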
Install Coc extensions:
:CocInstall coc-clangd # C/C++ support via clangd
:CocInstall coc-cmake # CMake support
:CocInstall coc-emmet # Emmet abbreviation expansion
:CocInstall coc-git # Git support
:CocInstall coc-highlight # highlight support
:CocInstall coc-jedi # Python support via jedi
:CocInstall coc-json # JSON file support
# :CocInstall coc-python # Python support (commented out; coc-jedi is used instead)
:CocInstall coc-sh # Bash support
:CocInstall coc-snippets # snippets support
:CocInstall coc-vimlsp # Vim script language server
:CocInstall coc-yaml # YAML support
:CocInstall coc-syntax # keyword completion from Vim syntax files
:CocInstall coc-pairs # auto-insert matching brackets and quotes
:CocInstall coc-bookmark # bookmark management
# Or install everything in one go:
:CocInstall coc-clangd coc-cmake coc-emmet coc-git coc-highlight coc-pairs
:CocInstall coc-jedi coc-json coc-sh coc-snippets coc-vimlsp coc-yaml coc-syntax
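After the installs finish, the extensions can be listed and updated with coc.nvim's built-in commands:
:CocList extensions # list installed extensions and their state
:CocUpdate # update all installed extensions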
Reference: https://www.cnblogs.com/cniwoq/p/13272746.html
For the `build/index.js not found` error, see: https://www.cnblogs.com/sober-orange/p/cocnvim-build-indexjs.html
- Unknown function: SemshiBufWipeout. Fix: run :UpdateRemotePlugins, which reveals the underlying error (No module named pynvim); install it with:
pip install pynvim
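A quick way to apply and verify the fix, assuming `python3` is the interpreter Neovim uses:

```bash
python3 -m pip install pynvim   # Python client required by remote plugins such as semshi
python3 -c "import pynvim"      # should exit silently if the install worked
```

Then re-open nvim and run :UpdateRemotePlugins again.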
Starship prompt: enable it for Bash and Zsh by appending the init hook to each shell's rc file:
$ echo -e 'eval "$(starship init bash)"' >> ~/.bashrc
$ echo -e 'eval "$(starship init zsh)"' >> ~/.zshrc
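If starship is not installed yet, it can be set up and checked first (this uses the upstream install script; skip it if starship is already on PATH):

```bash
command -v starship >/dev/null || curl -sS https://starship.rs/install.sh | sh
starship --version   # confirm the binary is available before wiring it into the shells above
```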
For Nu Shell, locate the config file with `config path` and add the following:
```toml
startup = [
"mkdir ~/.cache/starship",
"starship init nu | save ~/.cache/starship/init.nu",
"source ~/.cache/starship/init.nu"
]
prompt = "starship_prompt"
```
28. Installing R
https://www.yuque.com/yumingmin/dash/ym8fwr
20. Startup Script
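The following zsh script presents an interactive menu for starting and stopping local services (Jupyter Notebook, Redis, MySQL, Airflow, Superset, Zeppelin, RStudio Server, DolphinScheduler, and an Aliyun Drive WebDAV mount):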
#!/bin/zsh
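# Grab this machine's non-loopback IPv4 address; it is used in the access URLs printed by the menu below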
local_ip=`ifconfig -a|grep inet|grep -v 127.0.0.1|grep -v inet6|awk '{print $2}'|tr -d "addr:"`
# >>> conda initialize >>>
# !! Contents within this block are managed by 'conda init' !!
__conda_setup="$('/home/yumingmin/bigdata/miniconda3/bin/conda' 'shell.zsh' 'hook' 2> /dev/null)"
if [ $? -eq 0 ]; then
eval "$__conda_setup"
else
if [ -f "/home/yumingmin/bigdata/miniconda3/etc/profile.d/conda.sh" ]; then
. "/home/yumingmin/bigdata/miniconda3/etc/profile.d/conda.sh"
else
export PATH="/home/yumingmin/bigdata/miniconda3/bin:$PATH"
fi
fi
unset __conda_setup
# <<< conda initialize <<<
while true
do
echo -e "\n\033[40;32m===================== 🔎 Task Manager 🔎 ==================== \033[0m\n"
echo -e " 🚀 1) Start Jupyter Notebook"
echo -e " 🚀 2) Start Redis Server"
echo -e " 🚀 3) Start MySQL"
echo -e " 🚀 4) Start Airflow"
echo -e " 🚀 5) Start Superset"
echo -e " 🚀 6) Start Zeppelin"
echo -e " 🚀 7) Start RStudio Server"
echo -e " 🚀 8) Start DolphinScheduler"
echo -e " 🚀 9) Mount Aliyun Drive"
echo -e " ❌ 40) Stop Airflow"
echo -e " ❌ 90) Unmount Aliyun Drive"
echo -e " ❌ 0) Exit"
echo -e "Select one option: \c"
read choice
case $choice in
0 )
break;;
1 )
echo `date +%EY-%m-%d-%H:%M:%S`: "⏳ Starting Jupyter Notebook..."
nohup jupyter notebook --ip='*' --port=8888 --no-browser --notebook-dir="$HOME/notebooks" >> ~/.logs/jupyter-notebook.run.log 2>&1 &
sleep 1s
head -n 30 ~/.logs/jupyter-notebook.run.log
echo "😁 Jupyter Notebook 启动成功!\n"
echo "🚀 请访问 http://${local_ip}:8888"
continue;;
2 )
echo `date +%EY-%m-%d-%H:%M:%S`: "⏳ Starting Redis Server..."
echo -e "\n Executing the command: \033[40;31m redis-server ~/.redis/redis.conf \033[0m \n"
nohup redis-server ~/.redis/redis.conf >> ~/.logs/redis.run.log 2>&1 &
sleep 1s
head -n 30 ~/.logs/redis.run.log
echo "😁 Redis Server started successfully!"
continue;;
3 )
echo `date +%EY-%m-%d-%H:%M:%S`: "⏳ Starting MySQL..."
echo -e "\n Executing the command: \033[40;31m service mysql start \033[0m \n"
sudo mkdir -p /var/run/mysqld
sudo chown mysql:mysql /var/run/mysqld
sudo service mysql start
sudo service mysql status
echo "😁 MySQL 启动成功!"
continue;;
4 )
echo `date +%EY-%m-%d-%H:%M:%S`: "⏳ Starting Airflow..."
# conda activate airflow_py38
echo -e "\n Executing the command: \033[40;31m airflow webserver \033[0m \n"
nohup airflow webserver >> ~/.logs/airflow-webserver.log 2>&1 &
sleep 1s
head -n 30 ~/.logs/airflow-webserver.log
echo -e "\n Executing the command: \033[40;31m airflow scheduler \033[0m \n"
nohup airflow scheduler >> ~/.logs/airflow-scheduler.log 2>&1 &
sleep 1s
head -n 30 ~/.logs/airflow-scheduler.log
# Flower and the Celery workers only need to be started when using the Celery executor
echo -e "\n Executing the command: \033[40;31m airflow celery flower \033[0m \n"
nohup airflow celery flower >> ~/.logs/airflow-celery-flower.log 2>&1 &
sleep 1s
head -n 30 ~/.logs/airflow-celery-flower.log
echo -e "\n Executing the command: \033[40;31m airflow celery worker \033[0m \n"
nohup airflow celery worker >> ~/.logs/airflow-celery-worker.log 2>&1 &
sleep 1s
head -n 30 ~/.logs/airflow-celery-worker.log
echo "😁 Airflow 启动完成!"
echo "🚀 请访问 http://${local_ip}:9000"
continue;;
5 )
echo `date +%EY-%m-%d-%H:%M:%S`: "⏳ Starting Superset..."
conda activate superset_py38
echo -e "\n Executing the command: \033[40;31m superset run -h 0.0.0.0 -p 8088 --with-threads --reload --debugger \033[0m \n"
nohup superset run -h 0.0.0.0 -p 8088 --with-threads --reload --debugger >> ~/.logs/superset.run.log 2>&1 &
sleep 1s
head -n 30 ~/.logs/superset.run.log
echo "😁 Superset 启动成功!\n"
echo "🚀 请访问 http://${local_ip}:8088"
continue;;
6 )
echo `date +%EY-%m-%d-%H:%M:%S`: "⏳ Starting Zeppelin..."
conda activate zeppelin_py37
echo -e "\n Executing the command: \033[40;31m zeppelin restart \033[0m \n"
/home/yumingmin/bigdata/zeppelin-0.10.0/bin/zeppelin-daemon.sh restart
sleep 1s
echo "😁 Zeppelin 启动成功!\n"
echo "🚀 请访问 http://${local_ip}:8080"
conda deactivate
continue;;
7 )
echo `date +%EY-%m-%d-%H:%M:%S`: "⏳ Starting RStudio Server..."
echo -e "\n Executing the command: \033[40;31m sudo rstudio-server stop\033[0m \n"
sudo rstudio-server stop
sudo rstudio-server start
sleep 1s
echo "😁 RStudio Server 启动成功!\n"
echo "🚀 请访问 http://${local_ip}:8787"
continue;;
8 )
echo `date +%EY-%m-%d-%H:%M:%S`: "⏳ Starting DolphinScheduler..."
# TODO: the DolphinScheduler start commands are still placeholders and remain commented out below
# sudo rstudio-server stop
# sudo rstudio-server start
sleep 1s
echo "😁 Dolphin Scheduler 启动成功!\n"
# echo "🚀 请访问 http://${local_ip}:8787"
continue;;
9 )
echo `date +%EY-%m-%d-%H:%M:%S`: "⏳ Mounting Aliyun Drive..."
cd ~/bigdata/aliyun-driver
if [ -z "$aliyun_token" ]; then
echo -e "Log in to the Aliyun Drive web client, copy the Refresh Token, and enter it here: \c"
read aliyun_token
export aliyun_token=$aliyun_token
fi
nohup java -jar webdav-2.4.1.jar \
--aliyundrive.refresh-token=${aliyun_token} \
--server.port=5553 \
--aliyundrive.auth.enable=true \
--aliyundrive.auth.user-name=admin \
--aliyundrive.auth.password=admin >> ~/.logs/aliyun.run.log 2>&1 &
sudo mount -t davfs -o noexec http://127.0.0.1:5553/ /home/yumingmin/data/aliyunpan
sleep 2s
head -n 30 ~/.logs/aliyun.run.log
echo "😁 阿里云盘挂载成功!\n"
echo "🚀 数据存放在 ~/data/aliyunpan 目录下,可直接访问 http://${local_ip}:5553/"
cd
continue;;
40 )
echo `date +%EY-%m-%d-%H:%M:%S`: "⏳ Stopping Airflow..."
ps -ef | grep -Ei '(airflow webserver)' | grep -v "grep" | awk '{print $2}' | xargs -i kill {}
ps -ef | grep -Ei '(airflow-webserver)' | grep -v "grep" | awk '{print $2}' | xargs -i kill {}
ps -ef | grep -Ei '(airflow scheduler)' | grep -v "grep" | awk '{print $2}' | xargs -i kill {}
ps -ef | grep -Ei '(airflow-scheduler)' | grep -v "grep" | awk '{print $2}' | xargs -i kill {}
ps -ef | grep -Ei '(airflow serve)' | grep -v "grep" | awk '{print $2}' | xargs -i kill {}
ps -ef | grep -Ei '(airflow celery)' | grep -v "grep" | awk '{print $2}' | xargs -i kill {}
echo "😁 Airflow 关闭完成!"
continue;;
90 )
echo `date +%EY-%m-%d-%H:%M:%S`: "⏳ Shutting down the Aliyun Drive mount..."
ps -ef | grep -Ei '(webdav)' | grep -v "grep" | awk '{print $2}' | xargs -i kill {}
sudo umount /home/yumingmin/data/aliyunpan
sleep 2s
echo "😁 阿里云盘关闭成功!\n"
continue;;
esac
done
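A minimal way to run the script (the file name and location are only an example):

```bash
chmod +x ~/bin/start.sh   # assuming the script above was saved as ~/bin/start.sh
~/bin/start.sh            # or: zsh ~/bin/start.sh
```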