initialized
This commit is contained in:
parent
28d79d3698
commit
aff7a3209a
39
.devcontainer/Dockerfile
Normal file
39
.devcontainer/Dockerfile
Normal file
|
@ -0,0 +1,39 @@
|
|||
# Note: You can use any Debian/Ubuntu based image you want.
|
||||
FROM mcr.microsoft.com/vscode/devcontainers/base:0-bullseye
|
||||
|
||||
# [Option] Install zsh
|
||||
ARG INSTALL_ZSH="true"
|
||||
# [Option] Upgrade OS packages to their latest versions
|
||||
ARG UPGRADE_PACKAGES="false"
|
||||
# [Option] Enable non-root Docker access in container
|
||||
ARG ENABLE_NONROOT_DOCKER="true"
|
||||
# [Option] Use the OSS Moby CLI instead of the licensed Docker CLI
|
||||
ARG USE_MOBY="true"
|
||||
|
||||
# Enable new "BUILDKIT" mode for Docker CLI
|
||||
ENV DOCKER_BUILDKIT=1
|
||||
|
||||
# Install needed packages and setup non-root user. Use a separate RUN statement to add your
|
||||
# own dependencies. A user of "automatic" attempts to reuse an user ID if one already exists.
|
||||
ARG USERNAME=automatic
|
||||
ARG USER_UID=1000
|
||||
ARG USER_GID=$USER_UID
|
||||
COPY library-scripts/*.sh /tmp/library-scripts/
|
||||
RUN apt-get update \
|
||||
&& /bin/bash /tmp/library-scripts/common-debian.sh "${INSTALL_ZSH}" "${USERNAME}" "${USER_UID}" "${USER_GID}" "${UPGRADE_PACKAGES}" "true" "true" \
|
||||
# Use Docker script from script library to set things up
|
||||
&& /bin/bash /tmp/library-scripts/docker-debian.sh "${ENABLE_NONROOT_DOCKER}" "/var/run/docker-host.sock" "/var/run/docker.sock" "${USERNAME}" \
|
||||
# Clean up
|
||||
&& apt-get autoremove -y && apt-get clean -y && rm -rf /var/lib/apt/lists/* /tmp/library-scripts/
|
||||
|
||||
# Setting the ENTRYPOINT to docker-init.sh will configure non-root access
|
||||
# to the Docker socket. The script will also execute CMD as needed.
|
||||
ENTRYPOINT [ "/usr/local/share/docker-init.sh" ]
|
||||
CMD [ "sleep", "infinity" ]
|
||||
|
||||
# [Optional] Uncomment this section to install additional OS packages.
|
||||
# RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
|
||||
# && apt-get -y install --no-install-recommends <your-package-list-here>
|
||||
|
||||
RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
|
||||
&& apt-get -y install --no-install-recommends libpq-dev
|
52
.devcontainer/devcontainer.json
Normal file
52
.devcontainer/devcontainer.json
Normal file
|
@ -0,0 +1,52 @@
|
|||
// For format details, see https://aka.ms/devcontainer.json. For config options, see the README at:
|
||||
// https://github.com/microsoft/vscode-dev-containers/tree/v0.234.0/containers/docker-from-docker-compose
|
||||
{
|
||||
"name": "beteran-backend-server-edge",
|
||||
"dockerComposeFile": "docker-compose.yml",
|
||||
"service": "app",
|
||||
"workspaceFolder": "/workspace",
|
||||
// Configure tool-specific properties.
|
||||
"customizations": {
|
||||
// Configure properties specific to VS Code.
|
||||
"vscode": {
|
||||
// Set *default* container specific settings.json values on container create.
|
||||
"settings": {
|
||||
"lldb.verboseLogging": true,
|
||||
"lldb.executable": "/usr/bin/lldb",
|
||||
// VS Code don't watch files under ./target
|
||||
"files.watcherExclude": {
|
||||
"**/target/**": true
|
||||
},
|
||||
"rust-analyzer.checkOnSave.command": "clippy",
|
||||
"editor.tabSize": 2,
|
||||
"editor.insertSpaces": true,
|
||||
"editor.formatOnSave": true
|
||||
},
|
||||
// Add the IDs of extensions you want installed when the container is created.
|
||||
"extensions": [
|
||||
"donjayamanne.githistory",
|
||||
"eamodio.gitlens",
|
||||
"matklad.rust-analyzer",
|
||||
"mhutchie.git-graph",
|
||||
"ms-azuretools.vscode-docker",
|
||||
"mutantdino.resourcemonitor",
|
||||
"serayuzgur.crates",
|
||||
"tamasfe.even-better-toml",
|
||||
"vadimcn.vscode-lldb"
|
||||
]
|
||||
}
|
||||
},
|
||||
|
||||
// Use 'forwardPorts' to make a list of ports inside the container available locally.
|
||||
// "forwardPorts": [],
|
||||
|
||||
// Use 'postCreateCommand' to run commands after the container is created.
|
||||
// "postCreateCommand": "docker --version",
|
||||
"postCreateCommand": "bash ./.devcontainer/scripts/postCreateCommand.sh",
|
||||
// Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root.
|
||||
"remoteUser": "vscode",
|
||||
"features": {
|
||||
"git": "latest",
|
||||
"rust": "latest"
|
||||
}
|
||||
}
|
29
.devcontainer/docker-compose.yml
Normal file
29
.devcontainer/docker-compose.yml
Normal file
|
@ -0,0 +1,29 @@
|
|||
version: '3'
|
||||
|
||||
services:
|
||||
app:
|
||||
build:
|
||||
context: .
|
||||
dockerfile: Dockerfile
|
||||
|
||||
volumes:
|
||||
# Forwards the local Docker socket to the container.
|
||||
- /var/run/docker.sock:/var/run/docker-host.sock
|
||||
# Update this to wherever you want VS Code to mount the folder of your project
|
||||
- ..:/workspace:cached
|
||||
|
||||
# Overrides default command so things don't shut down after the process ends.
|
||||
entrypoint: /usr/local/share/docker-init.sh
|
||||
command: sleep infinity
|
||||
|
||||
# Uncomment the next four lines if you will use a ptrace-based debuggers like C++, Go, and Rust.
|
||||
# cap_add:
|
||||
# - SYS_PTRACE
|
||||
# security_opt:
|
||||
# - seccomp:unconfined
|
||||
|
||||
# Uncomment the next line to use a non-root user for all processes.
|
||||
# user: vscode
|
||||
|
||||
# Use "forwardPorts" in **devcontainer.json** to forward an app port locally.
|
||||
# (Adding the "ports" property to this file will not forward from a Codespace.)
|
454
.devcontainer/library-scripts/common-debian.sh
Normal file
454
.devcontainer/library-scripts/common-debian.sh
Normal file
|
@ -0,0 +1,454 @@
|
|||
#!/usr/bin/env bash
|
||||
#-------------------------------------------------------------------------------------------------------------
|
||||
# Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
|
||||
#-------------------------------------------------------------------------------------------------------------
|
||||
#
|
||||
# Docs: https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/common.md
|
||||
# Maintainer: The VS Code and Codespaces Teams
|
||||
#
|
||||
# Syntax: ./common-debian.sh [install zsh flag] [username] [user UID] [user GID] [upgrade packages flag] [install Oh My Zsh! flag] [Add non-free packages]
|
||||
|
||||
set -e
|
||||
|
||||
INSTALL_ZSH=${1:-"true"}
|
||||
USERNAME=${2:-"automatic"}
|
||||
USER_UID=${3:-"automatic"}
|
||||
USER_GID=${4:-"automatic"}
|
||||
UPGRADE_PACKAGES=${5:-"true"}
|
||||
INSTALL_OH_MYS=${6:-"true"}
|
||||
ADD_NON_FREE_PACKAGES=${7:-"false"}
|
||||
SCRIPT_DIR="$(cd $(dirname "${BASH_SOURCE[0]}") && pwd)"
|
||||
MARKER_FILE="/usr/local/etc/vscode-dev-containers/common"
|
||||
|
||||
if [ "$(id -u)" -ne 0 ]; then
|
||||
echo -e 'Script must be run as root. Use sudo, su, or add "USER root" to your Dockerfile before running this script.'
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Ensure that login shells get the correct path if the user updated the PATH using ENV.
|
||||
rm -f /etc/profile.d/00-restore-env.sh
|
||||
echo "export PATH=${PATH//$(sh -lc 'echo $PATH')/\$PATH}" > /etc/profile.d/00-restore-env.sh
|
||||
chmod +x /etc/profile.d/00-restore-env.sh
|
||||
|
||||
# If in automatic mode, determine if a user already exists, if not use vscode
|
||||
if [ "${USERNAME}" = "auto" ] || [ "${USERNAME}" = "automatic" ]; then
|
||||
USERNAME=""
|
||||
POSSIBLE_USERS=("vscode" "node" "codespace" "$(awk -v val=1000 -F ":" '$3==val{print $1}' /etc/passwd)")
|
||||
for CURRENT_USER in ${POSSIBLE_USERS[@]}; do
|
||||
if id -u ${CURRENT_USER} > /dev/null 2>&1; then
|
||||
USERNAME=${CURRENT_USER}
|
||||
break
|
||||
fi
|
||||
done
|
||||
if [ "${USERNAME}" = "" ]; then
|
||||
USERNAME=vscode
|
||||
fi
|
||||
elif [ "${USERNAME}" = "none" ]; then
|
||||
USERNAME=root
|
||||
USER_UID=0
|
||||
USER_GID=0
|
||||
fi
|
||||
|
||||
# Load markers to see which steps have already run
|
||||
if [ -f "${MARKER_FILE}" ]; then
|
||||
echo "Marker file found:"
|
||||
cat "${MARKER_FILE}"
|
||||
source "${MARKER_FILE}"
|
||||
fi
|
||||
|
||||
# Ensure apt is in non-interactive to avoid prompts
|
||||
export DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
# Function to call apt-get if needed
|
||||
apt_get_update_if_needed()
|
||||
{
|
||||
if [ ! -d "/var/lib/apt/lists" ] || [ "$(ls /var/lib/apt/lists/ | wc -l)" = "0" ]; then
|
||||
echo "Running apt-get update..."
|
||||
apt-get update
|
||||
else
|
||||
echo "Skipping apt-get update."
|
||||
fi
|
||||
}
|
||||
|
||||
# Run install apt-utils to avoid debconf warning then verify presence of other common developer tools and dependencies
|
||||
if [ "${PACKAGES_ALREADY_INSTALLED}" != "true" ]; then
|
||||
|
||||
package_list="apt-utils \
|
||||
openssh-client \
|
||||
gnupg2 \
|
||||
dirmngr \
|
||||
iproute2 \
|
||||
procps \
|
||||
lsof \
|
||||
htop \
|
||||
net-tools \
|
||||
psmisc \
|
||||
curl \
|
||||
wget \
|
||||
rsync \
|
||||
ca-certificates \
|
||||
unzip \
|
||||
zip \
|
||||
nano \
|
||||
vim-tiny \
|
||||
less \
|
||||
jq \
|
||||
lsb-release \
|
||||
apt-transport-https \
|
||||
dialog \
|
||||
libc6 \
|
||||
libgcc1 \
|
||||
libkrb5-3 \
|
||||
libgssapi-krb5-2 \
|
||||
libicu[0-9][0-9] \
|
||||
liblttng-ust[0-9] \
|
||||
libstdc++6 \
|
||||
zlib1g \
|
||||
locales \
|
||||
sudo \
|
||||
ncdu \
|
||||
man-db \
|
||||
strace \
|
||||
manpages \
|
||||
manpages-dev \
|
||||
init-system-helpers"
|
||||
|
||||
# Needed for adding manpages-posix and manpages-posix-dev which are non-free packages in Debian
|
||||
if [ "${ADD_NON_FREE_PACKAGES}" = "true" ]; then
|
||||
# Bring in variables from /etc/os-release like VERSION_CODENAME
|
||||
. /etc/os-release
|
||||
sed -i -E "s/deb http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME} main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME} main contrib non-free/" /etc/apt/sources.list
|
||||
sed -i -E "s/deb-src http:\/\/(deb|httredir)\.debian\.org\/debian ${VERSION_CODENAME} main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME} main contrib non-free/" /etc/apt/sources.list
|
||||
sed -i -E "s/deb http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME}-updates main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME}-updates main contrib non-free/" /etc/apt/sources.list
|
||||
sed -i -E "s/deb-src http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME}-updates main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME}-updates main contrib non-free/" /etc/apt/sources.list
|
||||
sed -i "s/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main contrib non-free/" /etc/apt/sources.list
|
||||
sed -i "s/deb-src http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main contrib non-free/" /etc/apt/sources.list
|
||||
sed -i "s/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main contrib non-free/" /etc/apt/sources.list
|
||||
sed -i "s/deb-src http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main contrib non-free/" /etc/apt/sources.list
|
||||
# Handle bullseye location for security https://www.debian.org/releases/bullseye/amd64/release-notes/ch-information.en.html
|
||||
sed -i "s/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main contrib non-free/" /etc/apt/sources.list
|
||||
sed -i "s/deb-src http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main contrib non-free/" /etc/apt/sources.list
|
||||
echo "Running apt-get update..."
|
||||
apt-get update
|
||||
package_list="${package_list} manpages-posix manpages-posix-dev"
|
||||
else
|
||||
apt_get_update_if_needed
|
||||
fi
|
||||
|
||||
# Install libssl1.1 if available
|
||||
if [[ ! -z $(apt-cache --names-only search ^libssl1.1$) ]]; then
|
||||
package_list="${package_list} libssl1.1"
|
||||
fi
|
||||
|
||||
# Install appropriate version of libssl1.0.x if available
|
||||
libssl_package=$(dpkg-query -f '${db:Status-Abbrev}\t${binary:Package}\n' -W 'libssl1\.0\.?' 2>&1 || echo '')
|
||||
if [ "$(echo "$LIlibssl_packageBSSL" | grep -o 'libssl1\.0\.[0-9]:' | uniq | sort | wc -l)" -eq 0 ]; then
|
||||
if [[ ! -z $(apt-cache --names-only search ^libssl1.0.2$) ]]; then
|
||||
# Debian 9
|
||||
package_list="${package_list} libssl1.0.2"
|
||||
elif [[ ! -z $(apt-cache --names-only search ^libssl1.0.0$) ]]; then
|
||||
# Ubuntu 18.04, 16.04, earlier
|
||||
package_list="${package_list} libssl1.0.0"
|
||||
fi
|
||||
fi
|
||||
|
||||
echo "Packages to verify are installed: ${package_list}"
|
||||
apt-get -y install --no-install-recommends ${package_list} 2> >( grep -v 'debconf: delaying package configuration, since apt-utils is not installed' >&2 )
|
||||
|
||||
# Install git if not already installed (may be more recent than distro version)
|
||||
if ! type git > /dev/null 2>&1; then
|
||||
apt-get -y install --no-install-recommends git
|
||||
fi
|
||||
|
||||
PACKAGES_ALREADY_INSTALLED="true"
|
||||
fi
|
||||
|
||||
# Get to latest versions of all packages
|
||||
if [ "${UPGRADE_PACKAGES}" = "true" ]; then
|
||||
apt_get_update_if_needed
|
||||
apt-get -y upgrade --no-install-recommends
|
||||
apt-get autoremove -y
|
||||
fi
|
||||
|
||||
# Ensure at least the en_US.UTF-8 UTF-8 locale is available.
|
||||
# Common need for both applications and things like the agnoster ZSH theme.
|
||||
if [ "${LOCALE_ALREADY_SET}" != "true" ] && ! grep -o -E '^\s*en_US.UTF-8\s+UTF-8' /etc/locale.gen > /dev/null; then
|
||||
echo "en_US.UTF-8 UTF-8" >> /etc/locale.gen
|
||||
locale-gen
|
||||
LOCALE_ALREADY_SET="true"
|
||||
fi
|
||||
|
||||
# Create or update a non-root user to match UID/GID.
|
||||
group_name="${USERNAME}"
|
||||
if id -u ${USERNAME} > /dev/null 2>&1; then
|
||||
# User exists, update if needed
|
||||
if [ "${USER_GID}" != "automatic" ] && [ "$USER_GID" != "$(id -g $USERNAME)" ]; then
|
||||
group_name="$(id -gn $USERNAME)"
|
||||
groupmod --gid $USER_GID ${group_name}
|
||||
usermod --gid $USER_GID $USERNAME
|
||||
fi
|
||||
if [ "${USER_UID}" != "automatic" ] && [ "$USER_UID" != "$(id -u $USERNAME)" ]; then
|
||||
usermod --uid $USER_UID $USERNAME
|
||||
fi
|
||||
else
|
||||
# Create user
|
||||
if [ "${USER_GID}" = "automatic" ]; then
|
||||
groupadd $USERNAME
|
||||
else
|
||||
groupadd --gid $USER_GID $USERNAME
|
||||
fi
|
||||
if [ "${USER_UID}" = "automatic" ]; then
|
||||
useradd -s /bin/bash --gid $USERNAME -m $USERNAME
|
||||
else
|
||||
useradd -s /bin/bash --uid $USER_UID --gid $USERNAME -m $USERNAME
|
||||
fi
|
||||
fi
|
||||
|
||||
# Add add sudo support for non-root user
|
||||
if [ "${USERNAME}" != "root" ] && [ "${EXISTING_NON_ROOT_USER}" != "${USERNAME}" ]; then
|
||||
echo $USERNAME ALL=\(root\) NOPASSWD:ALL > /etc/sudoers.d/$USERNAME
|
||||
chmod 0440 /etc/sudoers.d/$USERNAME
|
||||
EXISTING_NON_ROOT_USER="${USERNAME}"
|
||||
fi
|
||||
|
||||
# ** Shell customization section **
|
||||
if [ "${USERNAME}" = "root" ]; then
|
||||
user_rc_path="/root"
|
||||
else
|
||||
user_rc_path="/home/${USERNAME}"
|
||||
fi
|
||||
|
||||
# Restore user .bashrc defaults from skeleton file if it doesn't exist or is empty
|
||||
if [ ! -f "${user_rc_path}/.bashrc" ] || [ ! -s "${user_rc_path}/.bashrc" ] ; then
|
||||
cp /etc/skel/.bashrc "${user_rc_path}/.bashrc"
|
||||
fi
|
||||
|
||||
# Restore user .profile defaults from skeleton file if it doesn't exist or is empty
|
||||
if [ ! -f "${user_rc_path}/.profile" ] || [ ! -s "${user_rc_path}/.profile" ] ; then
|
||||
cp /etc/skel/.profile "${user_rc_path}/.profile"
|
||||
fi
|
||||
|
||||
# .bashrc/.zshrc snippet
|
||||
rc_snippet="$(cat << 'EOF'
|
||||
|
||||
if [ -z "${USER}" ]; then export USER=$(whoami); fi
|
||||
if [[ "${PATH}" != *"$HOME/.local/bin"* ]]; then export PATH="${PATH}:$HOME/.local/bin"; fi
|
||||
|
||||
# Display optional first run image specific notice if configured and terminal is interactive
|
||||
if [ -t 1 ] && [[ "${TERM_PROGRAM}" = "vscode" || "${TERM_PROGRAM}" = "codespaces" ]] && [ ! -f "$HOME/.config/vscode-dev-containers/first-run-notice-already-displayed" ]; then
|
||||
if [ -f "/usr/local/etc/vscode-dev-containers/first-run-notice.txt" ]; then
|
||||
cat "/usr/local/etc/vscode-dev-containers/first-run-notice.txt"
|
||||
elif [ -f "/workspaces/.codespaces/shared/first-run-notice.txt" ]; then
|
||||
cat "/workspaces/.codespaces/shared/first-run-notice.txt"
|
||||
fi
|
||||
mkdir -p "$HOME/.config/vscode-dev-containers"
|
||||
# Mark first run notice as displayed after 10s to avoid problems with fast terminal refreshes hiding it
|
||||
((sleep 10s; touch "$HOME/.config/vscode-dev-containers/first-run-notice-already-displayed") &)
|
||||
fi
|
||||
|
||||
# Set the default git editor if not already set
|
||||
if [ -z "$(git config --get core.editor)" ] && [ -z "${GIT_EDITOR}" ]; then
|
||||
if [ "${TERM_PROGRAM}" = "vscode" ]; then
|
||||
if [[ -n $(command -v code-insiders) && -z $(command -v code) ]]; then
|
||||
export GIT_EDITOR="code-insiders --wait"
|
||||
else
|
||||
export GIT_EDITOR="code --wait"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
EOF
|
||||
)"
|
||||
|
||||
# code shim, it fallbacks to code-insiders if code is not available
|
||||
cat << 'EOF' > /usr/local/bin/code
|
||||
#!/bin/sh
|
||||
|
||||
get_in_path_except_current() {
|
||||
which -a "$1" | grep -A1 "$0" | grep -v "$0"
|
||||
}
|
||||
|
||||
code="$(get_in_path_except_current code)"
|
||||
|
||||
if [ -n "$code" ]; then
|
||||
exec "$code" "$@"
|
||||
elif [ "$(command -v code-insiders)" ]; then
|
||||
exec code-insiders "$@"
|
||||
else
|
||||
echo "code or code-insiders is not installed" >&2
|
||||
exit 127
|
||||
fi
|
||||
EOF
|
||||
chmod +x /usr/local/bin/code
|
||||
|
||||
# systemctl shim - tells people to use 'service' if systemd is not running
|
||||
cat << 'EOF' > /usr/local/bin/systemctl
|
||||
#!/bin/sh
|
||||
set -e
|
||||
if [ -d "/run/systemd/system" ]; then
|
||||
exec /bin/systemctl/systemctl "$@"
|
||||
else
|
||||
echo '\n"systemd" is not running in this container due to its overhead.\nUse the "service" command to start services instead. e.g.: \n\nservice --status-all'
|
||||
fi
|
||||
EOF
|
||||
chmod +x /usr/local/bin/systemctl
|
||||
|
||||
# Codespaces bash and OMZ themes - partly inspired by https://github.com/ohmyzsh/ohmyzsh/blob/master/themes/robbyrussell.zsh-theme
|
||||
codespaces_bash="$(cat \
|
||||
<<'EOF'
|
||||
|
||||
# Codespaces bash prompt theme
|
||||
__bash_prompt() {
|
||||
local userpart='`export XIT=$? \
|
||||
&& [ ! -z "${GITHUB_USER}" ] && echo -n "\[\033[0;32m\]@${GITHUB_USER} " || echo -n "\[\033[0;32m\]\u " \
|
||||
&& [ "$XIT" -ne "0" ] && echo -n "\[\033[1;31m\]➜" || echo -n "\[\033[0m\]➜"`'
|
||||
local gitbranch='`\
|
||||
if [ "$(git config --get codespaces-theme.hide-status 2>/dev/null)" != 1 ]; then \
|
||||
export BRANCH=$(git symbolic-ref --short HEAD 2>/dev/null || git rev-parse --short HEAD 2>/dev/null); \
|
||||
if [ "${BRANCH}" != "" ]; then \
|
||||
echo -n "\[\033[0;36m\](\[\033[1;31m\]${BRANCH}" \
|
||||
&& if git ls-files --error-unmatch -m --directory --no-empty-directory -o --exclude-standard ":/*" > /dev/null 2>&1; then \
|
||||
echo -n " \[\033[1;33m\]✗"; \
|
||||
fi \
|
||||
&& echo -n "\[\033[0;36m\]) "; \
|
||||
fi; \
|
||||
fi`'
|
||||
local lightblue='\[\033[1;34m\]'
|
||||
local removecolor='\[\033[0m\]'
|
||||
PS1="${userpart} ${lightblue}\w ${gitbranch}${removecolor}\$ "
|
||||
unset -f __bash_prompt
|
||||
}
|
||||
__bash_prompt
|
||||
|
||||
EOF
|
||||
)"
|
||||
|
||||
codespaces_zsh="$(cat \
|
||||
<<'EOF'
|
||||
# Codespaces zsh prompt theme
|
||||
__zsh_prompt() {
|
||||
local prompt_username
|
||||
if [ ! -z "${GITHUB_USER}" ]; then
|
||||
prompt_username="@${GITHUB_USER}"
|
||||
else
|
||||
prompt_username="%n"
|
||||
fi
|
||||
PROMPT="%{$fg[green]%}${prompt_username} %(?:%{$reset_color%}➜ :%{$fg_bold[red]%}➜ )" # User/exit code arrow
|
||||
PROMPT+='%{$fg_bold[blue]%}%(5~|%-1~/…/%3~|%4~)%{$reset_color%} ' # cwd
|
||||
PROMPT+='$([ "$(git config --get codespaces-theme.hide-status 2>/dev/null)" != 1 ] && git_prompt_info)' # Git status
|
||||
PROMPT+='%{$fg[white]%}$ %{$reset_color%}'
|
||||
unset -f __zsh_prompt
|
||||
}
|
||||
ZSH_THEME_GIT_PROMPT_PREFIX="%{$fg_bold[cyan]%}(%{$fg_bold[red]%}"
|
||||
ZSH_THEME_GIT_PROMPT_SUFFIX="%{$reset_color%} "
|
||||
ZSH_THEME_GIT_PROMPT_DIRTY=" %{$fg_bold[yellow]%}✗%{$fg_bold[cyan]%})"
|
||||
ZSH_THEME_GIT_PROMPT_CLEAN="%{$fg_bold[cyan]%})"
|
||||
__zsh_prompt
|
||||
|
||||
EOF
|
||||
)"
|
||||
|
||||
# Add RC snippet and custom bash prompt
|
||||
if [ "${RC_SNIPPET_ALREADY_ADDED}" != "true" ]; then
|
||||
echo "${rc_snippet}" >> /etc/bash.bashrc
|
||||
echo "${codespaces_bash}" >> "${user_rc_path}/.bashrc"
|
||||
echo 'export PROMPT_DIRTRIM=4' >> "${user_rc_path}/.bashrc"
|
||||
if [ "${USERNAME}" != "root" ]; then
|
||||
echo "${codespaces_bash}" >> "/root/.bashrc"
|
||||
echo 'export PROMPT_DIRTRIM=4' >> "/root/.bashrc"
|
||||
fi
|
||||
chown ${USERNAME}:${group_name} "${user_rc_path}/.bashrc"
|
||||
RC_SNIPPET_ALREADY_ADDED="true"
|
||||
fi
|
||||
|
||||
# Optionally install and configure zsh and Oh My Zsh!
|
||||
if [ "${INSTALL_ZSH}" = "true" ]; then
|
||||
if ! type zsh > /dev/null 2>&1; then
|
||||
apt_get_update_if_needed
|
||||
apt-get install -y zsh
|
||||
fi
|
||||
if [ "${ZSH_ALREADY_INSTALLED}" != "true" ]; then
|
||||
echo "${rc_snippet}" >> /etc/zsh/zshrc
|
||||
ZSH_ALREADY_INSTALLED="true"
|
||||
fi
|
||||
|
||||
# Adapted, simplified inline Oh My Zsh! install steps that adds, defaults to a codespaces theme.
|
||||
# See https://github.com/ohmyzsh/ohmyzsh/blob/master/tools/install.sh for official script.
|
||||
oh_my_install_dir="${user_rc_path}/.oh-my-zsh"
|
||||
if [ ! -d "${oh_my_install_dir}" ] && [ "${INSTALL_OH_MYS}" = "true" ]; then
|
||||
template_path="${oh_my_install_dir}/templates/zshrc.zsh-template"
|
||||
user_rc_file="${user_rc_path}/.zshrc"
|
||||
umask g-w,o-w
|
||||
mkdir -p ${oh_my_install_dir}
|
||||
git clone --depth=1 \
|
||||
-c core.eol=lf \
|
||||
-c core.autocrlf=false \
|
||||
-c fsck.zeroPaddedFilemode=ignore \
|
||||
-c fetch.fsck.zeroPaddedFilemode=ignore \
|
||||
-c receive.fsck.zeroPaddedFilemode=ignore \
|
||||
"https://github.com/ohmyzsh/ohmyzsh" "${oh_my_install_dir}" 2>&1
|
||||
echo -e "$(cat "${template_path}")\nDISABLE_AUTO_UPDATE=true\nDISABLE_UPDATE_PROMPT=true" > ${user_rc_file}
|
||||
sed -i -e 's/ZSH_THEME=.*/ZSH_THEME="codespaces"/g' ${user_rc_file}
|
||||
|
||||
mkdir -p ${oh_my_install_dir}/custom/themes
|
||||
echo "${codespaces_zsh}" > "${oh_my_install_dir}/custom/themes/codespaces.zsh-theme"
|
||||
# Shrink git while still enabling updates
|
||||
cd "${oh_my_install_dir}"
|
||||
git repack -a -d -f --depth=1 --window=1
|
||||
# Copy to non-root user if one is specified
|
||||
if [ "${USERNAME}" != "root" ]; then
|
||||
cp -rf "${user_rc_file}" "${oh_my_install_dir}" /root
|
||||
chown -R ${USERNAME}:${group_name} "${user_rc_path}"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
# Persist image metadata info, script if meta.env found in same directory
|
||||
meta_info_script="$(cat << 'EOF'
|
||||
#!/bin/sh
|
||||
. /usr/local/etc/vscode-dev-containers/meta.env
|
||||
|
||||
# Minimal output
|
||||
if [ "$1" = "version" ] || [ "$1" = "image-version" ]; then
|
||||
echo "${VERSION}"
|
||||
exit 0
|
||||
elif [ "$1" = "release" ]; then
|
||||
echo "${GIT_REPOSITORY_RELEASE}"
|
||||
exit 0
|
||||
elif [ "$1" = "content" ] || [ "$1" = "content-url" ] || [ "$1" = "contents" ] || [ "$1" = "contents-url" ]; then
|
||||
echo "${CONTENTS_URL}"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
#Full output
|
||||
echo
|
||||
echo "Development container image information"
|
||||
echo
|
||||
if [ ! -z "${VERSION}" ]; then echo "- Image version: ${VERSION}"; fi
|
||||
if [ ! -z "${DEFINITION_ID}" ]; then echo "- Definition ID: ${DEFINITION_ID}"; fi
|
||||
if [ ! -z "${VARIANT}" ]; then echo "- Variant: ${VARIANT}"; fi
|
||||
if [ ! -z "${GIT_REPOSITORY}" ]; then echo "- Source code repository: ${GIT_REPOSITORY}"; fi
|
||||
if [ ! -z "${GIT_REPOSITORY_RELEASE}" ]; then echo "- Source code release/branch: ${GIT_REPOSITORY_RELEASE}"; fi
|
||||
if [ ! -z "${BUILD_TIMESTAMP}" ]; then echo "- Timestamp: ${BUILD_TIMESTAMP}"; fi
|
||||
if [ ! -z "${CONTENTS_URL}" ]; then echo && echo "More info: ${CONTENTS_URL}"; fi
|
||||
echo
|
||||
EOF
|
||||
)"
|
||||
if [ -f "${SCRIPT_DIR}/meta.env" ]; then
|
||||
mkdir -p /usr/local/etc/vscode-dev-containers/
|
||||
cp -f "${SCRIPT_DIR}/meta.env" /usr/local/etc/vscode-dev-containers/meta.env
|
||||
echo "${meta_info_script}" > /usr/local/bin/devcontainer-info
|
||||
chmod +x /usr/local/bin/devcontainer-info
|
||||
fi
|
||||
|
||||
# Write marker file
|
||||
mkdir -p "$(dirname "${MARKER_FILE}")"
|
||||
echo -e "\
|
||||
PACKAGES_ALREADY_INSTALLED=${PACKAGES_ALREADY_INSTALLED}\n\
|
||||
LOCALE_ALREADY_SET=${LOCALE_ALREADY_SET}\n\
|
||||
EXISTING_NON_ROOT_USER=${EXISTING_NON_ROOT_USER}\n\
|
||||
RC_SNIPPET_ALREADY_ADDED=${RC_SNIPPET_ALREADY_ADDED}\n\
|
||||
ZSH_ALREADY_INSTALLED=${ZSH_ALREADY_INSTALLED}" > "${MARKER_FILE}"
|
||||
|
||||
echo "Done!"
|
354
.devcontainer/library-scripts/docker-debian.sh
Normal file
354
.devcontainer/library-scripts/docker-debian.sh
Normal file
|
@ -0,0 +1,354 @@
|
|||
#!/usr/bin/env bash
|
||||
#-------------------------------------------------------------------------------------------------------------
|
||||
# Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
|
||||
#-------------------------------------------------------------------------------------------------------------
|
||||
#
|
||||
# Docs: https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/docker.md
|
||||
# Maintainer: The VS Code and Codespaces Teams
|
||||
#
|
||||
# Syntax: ./docker-debian.sh [enable non-root docker socket access flag] [source socket] [target socket] [non-root user] [use moby] [CLI version] [Major version for docker-compose]
|
||||
|
||||
ENABLE_NONROOT_DOCKER=${1:-"true"}
|
||||
SOURCE_SOCKET=${2:-"/var/run/docker-host.sock"}
|
||||
TARGET_SOCKET=${3:-"/var/run/docker.sock"}
|
||||
USERNAME=${4:-"automatic"}
|
||||
USE_MOBY=${5:-"true"}
|
||||
DOCKER_VERSION=${6:-"latest"}
|
||||
DOCKER_DASH_COMPOSE_VERSION=${7:-"v1"} # v1 or v2
|
||||
MICROSOFT_GPG_KEYS_URI="https://packages.microsoft.com/keys/microsoft.asc"
|
||||
DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES="buster bullseye bionic focal jammy"
|
||||
DOCKER_LICENSED_ARCHIVE_VERSION_CODENAMES="buster bullseye bionic focal hirsute impish jammy"
|
||||
|
||||
set -e
|
||||
|
||||
if [ "$(id -u)" -ne 0 ]; then
|
||||
echo -e 'Script must be run as root. Use sudo, su, or add "USER root" to your Dockerfile before running this script.'
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# Determine the appropriate non-root user
|
||||
if [ "${USERNAME}" = "auto" ] || [ "${USERNAME}" = "automatic" ]; then
|
||||
USERNAME=""
|
||||
POSSIBLE_USERS=("vscode" "node" "codespace" "$(awk -v val=1000 -F ":" '$3==val{print $1}' /etc/passwd)")
|
||||
for CURRENT_USER in ${POSSIBLE_USERS[@]}; do
|
||||
if id -u ${CURRENT_USER} > /dev/null 2>&1; then
|
||||
USERNAME=${CURRENT_USER}
|
||||
break
|
||||
fi
|
||||
done
|
||||
if [ "${USERNAME}" = "" ]; then
|
||||
USERNAME=root
|
||||
fi
|
||||
elif [ "${USERNAME}" = "none" ] || ! id -u ${USERNAME} > /dev/null 2>&1; then
|
||||
USERNAME=root
|
||||
fi
|
||||
|
||||
# Get central common setting
|
||||
get_common_setting() {
|
||||
if [ "${common_settings_file_loaded}" != "true" ]; then
|
||||
curl -sfL "https://aka.ms/vscode-dev-containers/script-library/settings.env" 2>/dev/null -o /tmp/vsdc-settings.env || echo "Could not download settings file. Skipping."
|
||||
common_settings_file_loaded=true
|
||||
fi
|
||||
if [ -f "/tmp/vsdc-settings.env" ]; then
|
||||
local multi_line=""
|
||||
if [ "$2" = "true" ]; then multi_line="-z"; fi
|
||||
local result="$(grep ${multi_line} -oP "$1=\"?\K[^\"]+" /tmp/vsdc-settings.env | tr -d '\0')"
|
||||
if [ ! -z "${result}" ]; then declare -g $1="${result}"; fi
|
||||
fi
|
||||
echo "$1=${!1}"
|
||||
}
|
||||
|
||||
# Function to run apt-get if needed
|
||||
apt_get_update_if_needed()
|
||||
{
|
||||
if [ ! -d "/var/lib/apt/lists" ] || [ "$(ls /var/lib/apt/lists/ | wc -l)" = "0" ]; then
|
||||
echo "Running apt-get update..."
|
||||
apt-get update
|
||||
else
|
||||
echo "Skipping apt-get update."
|
||||
fi
|
||||
}
|
||||
|
||||
# Checks if packages are installed and installs them if not
|
||||
check_packages() {
|
||||
if ! dpkg -s "$@" > /dev/null 2>&1; then
|
||||
apt_get_update_if_needed
|
||||
apt-get -y install --no-install-recommends "$@"
|
||||
fi
|
||||
}
|
||||
|
||||
# Figure out correct version of a three part version number is not passed
|
||||
find_version_from_git_tags() {
|
||||
local variable_name=$1
|
||||
local requested_version=${!variable_name}
|
||||
if [ "${requested_version}" = "none" ]; then return; fi
|
||||
local repository=$2
|
||||
local prefix=${3:-"tags/v"}
|
||||
local separator=${4:-"."}
|
||||
local last_part_optional=${5:-"false"}
|
||||
if [ "$(echo "${requested_version}" | grep -o "." | wc -l)" != "2" ]; then
|
||||
local escaped_separator=${separator//./\\.}
|
||||
local last_part
|
||||
if [ "${last_part_optional}" = "true" ]; then
|
||||
last_part="(${escaped_separator}[0-9]+)?"
|
||||
else
|
||||
last_part="${escaped_separator}[0-9]+"
|
||||
fi
|
||||
local regex="${prefix}\\K[0-9]+${escaped_separator}[0-9]+${last_part}$"
|
||||
local version_list="$(git ls-remote --tags ${repository} | grep -oP "${regex}" | tr -d ' ' | tr "${separator}" "." | sort -rV)"
|
||||
if [ "${requested_version}" = "latest" ] || [ "${requested_version}" = "current" ] || [ "${requested_version}" = "lts" ]; then
|
||||
declare -g ${variable_name}="$(echo "${version_list}" | head -n 1)"
|
||||
else
|
||||
set +e
|
||||
declare -g ${variable_name}="$(echo "${version_list}" | grep -E -m 1 "^${requested_version//./\\.}([\\.\\s]|$)")"
|
||||
set -e
|
||||
fi
|
||||
fi
|
||||
if [ -z "${!variable_name}" ] || ! echo "${version_list}" | grep "^${!variable_name//./\\.}$" > /dev/null 2>&1; then
|
||||
echo -e "Invalid ${variable_name} value: ${requested_version}\nValid values:\n${version_list}" >&2
|
||||
exit 1
|
||||
fi
|
||||
echo "${variable_name}=${!variable_name}"
|
||||
}
|
||||
|
||||
# Ensure apt is in non-interactive to avoid prompts
|
||||
export DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
# Install dependencies
|
||||
check_packages apt-transport-https curl ca-certificates gnupg2 dirmngr
|
||||
if ! type git > /dev/null 2>&1; then
|
||||
apt_get_update_if_needed
|
||||
apt-get -y install git
|
||||
fi
|
||||
|
||||
# Source /etc/os-release to get OS info
|
||||
. /etc/os-release
|
||||
# Fetch host/container arch.
|
||||
architecture="$(dpkg --print-architecture)"
|
||||
|
||||
# Check if distro is suppported
|
||||
if [ "${USE_MOBY}" = "true" ]; then
|
||||
# 'get_common_setting' allows attribute to be updated remotely
|
||||
get_common_setting DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES
|
||||
if [[ "${DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES}" != *"${VERSION_CODENAME}"* ]]; then
|
||||
err "Unsupported distribution version '${VERSION_CODENAME}'. To resolve, either: (1) set feature option '\"moby\": false' , or (2) choose a compatible OS distribution"
|
||||
err "Support distributions include: ${DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES}"
|
||||
exit 1
|
||||
fi
|
||||
echo "Distro codename '${VERSION_CODENAME}' matched filter '${DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES}'"
|
||||
else
|
||||
get_common_setting DOCKER_LICENSED_ARCHIVE_VERSION_CODENAMES
|
||||
if [[ "${DOCKER_LICENSED_ARCHIVE_VERSION_CODENAMES}" != *"${VERSION_CODENAME}"* ]]; then
|
||||
err "Unsupported distribution version '${VERSION_CODENAME}'. To resolve, please choose a compatible OS distribution"
|
||||
err "Support distributions include: ${DOCKER_LICENSED_ARCHIVE_VERSION_CODENAMES}"
|
||||
exit 1
|
||||
fi
|
||||
echo "Distro codename '${VERSION_CODENAME}' matched filter '${DOCKER_LICENSED_ARCHIVE_VERSION_CODENAMES}'"
|
||||
fi
|
||||
|
||||
# Set up the necessary apt repos (either Microsoft's or Docker's)
|
||||
if [ "${USE_MOBY}" = "true" ]; then
|
||||
|
||||
cli_package_name="moby-cli"
|
||||
|
||||
# Import key safely and import Microsoft apt repo
|
||||
get_common_setting MICROSOFT_GPG_KEYS_URI
|
||||
curl -sSL ${MICROSOFT_GPG_KEYS_URI} | gpg --dearmor > /usr/share/keyrings/microsoft-archive-keyring.gpg
|
||||
echo "deb [arch=${architecture} signed-by=/usr/share/keyrings/microsoft-archive-keyring.gpg] https://packages.microsoft.com/repos/microsoft-${ID}-${VERSION_CODENAME}-prod ${VERSION_CODENAME} main" > /etc/apt/sources.list.d/microsoft.list
|
||||
else
|
||||
# Name of proprietary engine package
|
||||
cli_package_name="docker-ce-cli"
|
||||
|
||||
# Import key safely and import Docker apt repo
|
||||
curl -fsSL https://download.docker.com/linux/${ID}/gpg | gpg --dearmor > /usr/share/keyrings/docker-archive-keyring.gpg
|
||||
echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/${ID} ${VERSION_CODENAME} stable" > /etc/apt/sources.list.d/docker.list
|
||||
fi
|
||||
|
||||
# Refresh apt lists
|
||||
apt-get update
|
||||
|
||||
# Soft version matching for CLI
|
||||
if [ "${DOCKER_VERSION}" = "latest" ] || [ "${DOCKER_VERSION}" = "lts" ] || [ "${DOCKER_VERSION}" = "stable" ]; then
|
||||
# Empty, meaning grab whatever "latest" is in apt repo
|
||||
cli_version_suffix=""
|
||||
else
|
||||
# Fetch a valid version from the apt-cache (eg: the Microsoft repo appends +azure, breakfix, etc...)
|
||||
docker_version_dot_escaped="${DOCKER_VERSION//./\\.}"
|
||||
docker_version_dot_plus_escaped="${docker_version_dot_escaped//+/\\+}"
|
||||
# Regex needs to handle debian package version number format: https://www.systutorials.com/docs/linux/man/5-deb-version/
|
||||
docker_version_regex="^(.+:)?${docker_version_dot_plus_escaped}([\\.\\+ ~:-]|$)"
|
||||
set +e # Don't exit if finding version fails - will handle gracefully
|
||||
cli_version_suffix="=$(apt-cache madison ${cli_package_name} | awk -F"|" '{print $2}' | sed -e 's/^[ \t]*//' | grep -E -m 1 "${docker_version_regex}")"
|
||||
set -e
|
||||
if [ -z "${cli_version_suffix}" ] || [ "${cli_version_suffix}" = "=" ]; then
|
||||
echo "(!) No full or partial Docker / Moby version match found for \"${DOCKER_VERSION}\" on OS ${ID} ${VERSION_CODENAME} (${architecture}). Available versions:"
|
||||
apt-cache madison ${cli_package_name} | awk -F"|" '{print $2}' | grep -oP '^(.+:)?\K.+'
|
||||
exit 1
|
||||
fi
|
||||
echo "cli_version_suffix ${cli_version_suffix}"
|
||||
fi
|
||||
|
||||
# Install Docker / Moby CLI if not already installed
|
||||
if type docker > /dev/null 2>&1; then
|
||||
echo "Docker / Moby CLI already installed."
|
||||
else
|
||||
if [ "${USE_MOBY}" = "true" ]; then
|
||||
apt-get -y install --no-install-recommends moby-cli${cli_version_suffix} moby-buildx
|
||||
apt-get -y install --no-install-recommends moby-compose || echo "(*) Package moby-compose (Docker Compose v2) not available for OS ${ID} ${VERSION_CODENAME} (${architecture}). Skipping."
|
||||
else
|
||||
apt-get -y install --no-install-recommends docker-ce-cli${cli_version_suffix}
|
||||
fi
|
||||
fi
|
||||
|
||||
# Install Docker Compose if not already installed and is on a supported architecture
|
||||
if type docker-compose > /dev/null 2>&1; then
|
||||
echo "Docker Compose already installed."
|
||||
else
|
||||
TARGET_COMPOSE_ARCH="$(uname -m)"
|
||||
if [ "${TARGET_COMPOSE_ARCH}" = "amd64" ]; then
|
||||
TARGET_COMPOSE_ARCH="x86_64"
|
||||
fi
|
||||
if [ "${TARGET_COMPOSE_ARCH}" != "x86_64" ]; then
|
||||
# Use pip to get a version that runns on this architecture
|
||||
if ! dpkg -s python3-minimal python3-pip libffi-dev python3-venv > /dev/null 2>&1; then
|
||||
apt_get_update_if_needed
|
||||
apt-get -y install python3-minimal python3-pip libffi-dev python3-venv
|
||||
fi
|
||||
export PIPX_HOME=/usr/local/pipx
|
||||
mkdir -p ${PIPX_HOME}
|
||||
export PIPX_BIN_DIR=/usr/local/bin
|
||||
export PYTHONUSERBASE=/tmp/pip-tmp
|
||||
export PIP_CACHE_DIR=/tmp/pip-tmp/cache
|
||||
pipx_bin=pipx
|
||||
if ! type pipx > /dev/null 2>&1; then
|
||||
pip3 install --disable-pip-version-check --no-cache-dir --user pipx
|
||||
pipx_bin=/tmp/pip-tmp/bin/pipx
|
||||
fi
|
||||
${pipx_bin} install --pip-args '--no-cache-dir --force-reinstall' docker-compose
|
||||
rm -rf /tmp/pip-tmp
|
||||
else
|
||||
compose_v1_version="1"
|
||||
find_version_from_git_tags compose_v1_version "https://github.com/docker/compose" "tags/"
|
||||
echo "(*) Installing docker-compose ${compose_v1_version}..."
|
||||
curl -fsSL "https://github.com/docker/compose/releases/download/${compose_v1_version}/docker-compose-Linux-x86_64" -o /usr/local/bin/docker-compose
|
||||
chmod +x /usr/local/bin/docker-compose
|
||||
fi
|
||||
fi
|
||||
|
||||
# Install docker-compose switch if not already installed - https://github.com/docker/compose-switch#manual-installation
|
||||
current_v1_compose_path="$(which docker-compose)"
|
||||
target_v1_compose_path="$(dirname "${current_v1_compose_path}")/docker-compose-v1"
|
||||
if ! type compose-switch > /dev/null 2>&1; then
|
||||
echo "(*) Installing compose-switch..."
|
||||
compose_switch_version="latest"
|
||||
find_version_from_git_tags compose_switch_version "https://github.com/docker/compose-switch"
|
||||
curl -fsSL "https://github.com/docker/compose-switch/releases/download/v${compose_switch_version}/docker-compose-linux-${architecture}" -o /usr/local/bin/compose-switch
|
||||
chmod +x /usr/local/bin/compose-switch
|
||||
# TODO: Verify checksum once available: https://github.com/docker/compose-switch/issues/11
|
||||
|
||||
# Setup v1 CLI as alternative in addition to compose-switch (which maps to v2)
|
||||
mv "${current_v1_compose_path}" "${target_v1_compose_path}"
|
||||
update-alternatives --install /usr/local/bin/docker-compose docker-compose /usr/local/bin/compose-switch 99
|
||||
update-alternatives --install /usr/local/bin/docker-compose docker-compose "${target_v1_compose_path}" 1
|
||||
fi
|
||||
if [ "${DOCKER_DASH_COMPOSE_VERSION}" = "v1" ]; then
|
||||
update-alternatives --set docker-compose "${target_v1_compose_path}"
|
||||
else
|
||||
update-alternatives --set docker-compose /usr/local/bin/compose-switch
|
||||
fi
|
||||
|
||||
# If init file already exists, exit
|
||||
if [ -f "/usr/local/share/docker-init.sh" ]; then
|
||||
exit 0
|
||||
fi
|
||||
echo "docker-init doesnt exist, adding..."
|
||||
|
||||
# By default, make the source and target sockets the same
|
||||
if [ "${SOURCE_SOCKET}" != "${TARGET_SOCKET}" ]; then
|
||||
touch "${SOURCE_SOCKET}"
|
||||
ln -s "${SOURCE_SOCKET}" "${TARGET_SOCKET}"
|
||||
fi
|
||||
|
||||
# Add a stub if not adding non-root user access, user is root
|
||||
if [ "${ENABLE_NONROOT_DOCKER}" = "false" ] || [ "${USERNAME}" = "root" ]; then
|
||||
echo -e '#!/usr/bin/env bash\nexec "$@"' > /usr/local/share/docker-init.sh
|
||||
chmod +x /usr/local/share/docker-init.sh
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# Setup a docker group in the event the docker socket's group is not root
|
||||
if ! grep -qE '^docker:' /etc/group; then
|
||||
groupadd --system docker
|
||||
fi
|
||||
usermod -aG docker "${USERNAME}"
|
||||
DOCKER_GID="$(grep -oP '^docker:x:\K[^:]+' /etc/group)"
|
||||
|
||||
# If enabling non-root access and specified user is found, setup socat and add script
|
||||
chown -h "${USERNAME}":root "${TARGET_SOCKET}"
|
||||
if ! dpkg -s socat > /dev/null 2>&1; then
|
||||
apt_get_update_if_needed
|
||||
apt-get -y install socat
|
||||
fi
|
||||
tee /usr/local/share/docker-init.sh > /dev/null \
|
||||
<< EOF
|
||||
#!/usr/bin/env bash
|
||||
#-------------------------------------------------------------------------------------------------------------
|
||||
# Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
|
||||
#-------------------------------------------------------------------------------------------------------------
|
||||
|
||||
set -e
|
||||
|
||||
SOCAT_PATH_BASE=/tmp/vscr-docker-from-docker
|
||||
SOCAT_LOG=\${SOCAT_PATH_BASE}.log
|
||||
SOCAT_PID=\${SOCAT_PATH_BASE}.pid
|
||||
|
||||
# Wrapper function to only use sudo if not already root
|
||||
sudoIf()
|
||||
{
|
||||
if [ "\$(id -u)" -ne 0 ]; then
|
||||
sudo "\$@"
|
||||
else
|
||||
"\$@"
|
||||
fi
|
||||
}
|
||||
|
||||
# Log messages
|
||||
log()
|
||||
{
|
||||
echo -e "[\$(date)] \$@" | sudoIf tee -a \${SOCAT_LOG} > /dev/null
|
||||
}
|
||||
|
||||
echo -e "\n** \$(date) **" | sudoIf tee -a \${SOCAT_LOG} > /dev/null
|
||||
log "Ensuring ${USERNAME} has access to ${SOURCE_SOCKET} via ${TARGET_SOCKET}"
|
||||
|
||||
# If enabled, try to update the docker group with the right GID. If the group is root,
|
||||
# fall back on using socat to forward the docker socket to another unix socket so
|
||||
# that we can set permissions on it without affecting the host.
|
||||
if [ "${ENABLE_NONROOT_DOCKER}" = "true" ] && [ "${SOURCE_SOCKET}" != "${TARGET_SOCKET}" ] && [ "${USERNAME}" != "root" ] && [ "${USERNAME}" != "0" ]; then
|
||||
SOCKET_GID=\$(stat -c '%g' ${SOURCE_SOCKET})
|
||||
if [ "\${SOCKET_GID}" != "0" ] && [ "\${SOCKET_GID}" != "${DOCKER_GID}" ] && ! grep -E ".+:x:\${SOCKET_GID}" /etc/group; then
|
||||
sudoIf groupmod --gid "\${SOCKET_GID}" docker
|
||||
else
|
||||
# Enable proxy if not already running
|
||||
if [ ! -f "\${SOCAT_PID}" ] || ! ps -p \$(cat \${SOCAT_PID}) > /dev/null; then
|
||||
log "Enabling socket proxy."
|
||||
log "Proxying ${SOURCE_SOCKET} to ${TARGET_SOCKET} for vscode"
|
||||
sudoIf rm -rf ${TARGET_SOCKET}
|
||||
(sudoIf socat UNIX-LISTEN:${TARGET_SOCKET},fork,mode=660,user=${USERNAME} UNIX-CONNECT:${SOURCE_SOCKET} 2>&1 | sudoIf tee -a \${SOCAT_LOG} > /dev/null & echo "\$!" | sudoIf tee \${SOCAT_PID} > /dev/null)
|
||||
else
|
||||
log "Socket proxy already running."
|
||||
fi
|
||||
fi
|
||||
log "Success"
|
||||
fi
|
||||
|
||||
# Execute whatever commands were passed in (if any). This allows us
|
||||
# to set this script to ENTRYPOINT while still executing the default CMD.
|
||||
set +e
|
||||
exec "\$@"
|
||||
EOF
|
||||
chmod +x /usr/local/share/docker-init.sh
|
||||
chown ${USERNAME}:root /usr/local/share/docker-init.sh
|
||||
echo "Done!"
|
5
.devcontainer/rust-toolchain.toml
Normal file
5
.devcontainer/rust-toolchain.toml
Normal file
|
@ -0,0 +1,5 @@
|
|||
[toolchain]
|
||||
channel = "stable"
|
||||
profile = "minimal"
|
||||
components = ["clippy", "rustfmt"]
|
||||
targets = []
|
4
.devcontainer/scripts/postCreateCommand.sh
Normal file
4
.devcontainer/scripts/postCreateCommand.sh
Normal file
|
@ -0,0 +1,4 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
set -e
|
||||
|
7
.gitignore
vendored
Normal file
7
.gitignore
vendored
Normal file
|
@ -0,0 +1,7 @@
|
|||
.DS_Store
|
||||
/build
|
||||
|
||||
# Added by cargo
|
||||
|
||||
/target
|
||||
Cargo.lock
|
75
.rustfmt.toml
Normal file
75
.rustfmt.toml
Normal file
|
@ -0,0 +1,75 @@
|
|||
# https://rust-lang.github.io/rustfmt/?version=v1.4.38&search=
|
||||
array_width = 60
|
||||
attr_fn_like_width = 70
|
||||
binop_separator = "Front" # "Front", "Back"
|
||||
blank_lines_lower_bound = 0
|
||||
blank_lines_upper_bound = 1
|
||||
brace_style = "SameLineWhere" #"AlwaysNextLine", "PreferSameLine", "SameLineWhere"
|
||||
chain_width = 60
|
||||
color = "Auto" #"Auto", "Always", "Never"
|
||||
combine_control_expr = true # true, false
|
||||
comment_width = 80
|
||||
condense_wildcard_suffixes = false # true, false
|
||||
control_brace_style = "AlwaysSameLine" # "AlwaysNextLine", "AlwaysSameLine", "ClosingNextLine"
|
||||
disable_all_formatting = false # true, false
|
||||
edition = "2015" # "2015", "2018", "2021"
|
||||
empty_item_single_line = true # true, false
|
||||
enum_discrim_align_threshold = 0
|
||||
error_on_line_overflow = false # true, false
|
||||
error_on_unformatted = false # true, false
|
||||
fn_args_layout = "Tall" # "Compressed", "Tall", "Vertical"
|
||||
fn_call_width = 60
|
||||
fn_single_line = false # true, false
|
||||
force_explicit_abi = true # true, false
|
||||
force_multiline_blocks = false # true, false
|
||||
format_code_in_doc_comments = false # true, false
|
||||
format_generated_files = false # true, false
|
||||
format_macro_matchers = false # true, false
|
||||
format_macro_bodies = true # true, false
|
||||
format_strings = false # true, false
|
||||
group_imports = "Preserve" # "Preserve", "StdExternalCrate"
|
||||
hard_tabs = false # true, false
|
||||
hex_literal_case = "Preserve" # "Upper", "Lower"
|
||||
hide_parse_errors = false # true, false
|
||||
ignore = []
|
||||
imports_indent = "Block" # "Block", "Visual"
|
||||
imports_layout = "Mixed" # "Horizontal", "HorizontalVertical", "Mixed", "Vertical"
|
||||
indent_style = "Block" # "Block", "Visual"
|
||||
inline_attribute_width = 0
|
||||
license_template_path = ""
|
||||
match_arm_blocks = true # true, false
|
||||
match_arm_leading_pipes = "Never" # "Always", "Never", "Preserve"
|
||||
match_block_trailing_comma = false # true, false
|
||||
max_width = 100
|
||||
merge_derives = true # true, false
|
||||
imports_granularity = "Preserve" # "Preserve", "Crate", "Module", "Item", "One"
|
||||
merge_imports = false # true, false
|
||||
newline_style = "Auto" # "Auto", "Native", "Unix", "Windows"
|
||||
normalize_comments = false # true, false
|
||||
normalize_doc_attributes = false # true, false
|
||||
overflow_delimited_expr = false # true, false
|
||||
remove_nested_parens = true # true, false
|
||||
reorder_impl_items = false # true, false
|
||||
reorder_imports = true # true, false
|
||||
reorder_modules = true # true, false
|
||||
report_fixme = "Never" # "Always", "Unnumbered", "Never"
|
||||
report_todo = "Never" # "Always", "Unnumbered", "Never"
|
||||
skip_children = false # true, false
|
||||
single_line_if_else_max_width = 50
|
||||
space_after_colon = true # true, false
|
||||
space_before_colon = false # true, false
|
||||
spaces_around_ranges = false # true, false
|
||||
struct_field_align_threshold = 0
|
||||
struct_lit_single_line = true # true, false
|
||||
struct_lit_width = 18
|
||||
struct_variant_width = 35
|
||||
tab_spaces = 2
|
||||
trailing_comma = "Vertical" # "Always", "Never", "Vertical"
|
||||
trailing_semicolon = true # true, false
|
||||
type_punctuation_density = "Wide" # "Compressed", "Wide"
|
||||
unstable_features = false # true, false
|
||||
use_field_init_shorthand = false # true, false
|
||||
use_small_heuristics = "Default" # "Default", "Off", "Max"
|
||||
use_try_shorthand = false # true, false
|
||||
where_single_line = false # true, false
|
||||
wrap_comments = false # true, false
|
53
.vscode/launch.json
vendored
Normal file
53
.vscode/launch.json
vendored
Normal file
|
@ -0,0 +1,53 @@
|
|||
{
|
||||
// Use IntelliSense to learn about possible attributes.
|
||||
// Hover to view descriptions of existing attributes.
|
||||
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "Debug executable 'beteran-backend-server-edge'",
|
||||
"cargo": {
|
||||
"args": [
|
||||
"build",
|
||||
"--bin=beteran-backend-server-edge",
|
||||
"--package=beteran-backend-server-edge"
|
||||
],
|
||||
"filter": {
|
||||
"name": "beteran-backend-server-edge",
|
||||
"kind": "bin"
|
||||
}
|
||||
},
|
||||
"env": {
|
||||
"URL_BROKER": "nats://192.168.50.200:4222",
|
||||
"QUEUE_BROKER": "bet.beteran",
|
||||
},
|
||||
"args": [],
|
||||
"cwd": "${workspaceFolder}"
|
||||
},
|
||||
{
|
||||
"type": "lldb",
|
||||
"request": "launch",
|
||||
"name": "Debug unit tests in executable 'beteran-backend-server-edge'",
|
||||
"cargo": {
|
||||
"args": [
|
||||
"test",
|
||||
"--no-run",
|
||||
"--bin=beteran-backend-server-edge",
|
||||
"--package=beteran-backend-server-edge"
|
||||
],
|
||||
"filter": {
|
||||
"name": "beteran-backend-server-edge",
|
||||
"kind": "bin"
|
||||
}
|
||||
},
|
||||
"env": {
|
||||
"URL_BROKER": "nats://192.168.50.200:4222",
|
||||
"QUEUE_BROKER": "bet.beteran",
|
||||
},
|
||||
"args": [],
|
||||
"cwd": "${workspaceFolder}"
|
||||
}
|
||||
]
|
||||
}
|
25
Cargo.toml
Normal file
25
Cargo.toml
Normal file
|
@ -0,0 +1,25 @@
|
|||
[package]
|
||||
name = "beteran-backend-server-edge"
|
||||
version = "0.1.0"
|
||||
edition = "2021"
|
||||
|
||||
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
[[bin]]
|
||||
name = "beteran-backend-server-edge"
|
||||
path = "./src/main.rs"
|
||||
|
||||
|
||||
[dependencies]
|
||||
futures = { version = "0", default-features = false, features = [
|
||||
"async-await",
|
||||
] }
|
||||
tokio = { version = "1", features = ["macros", "rt-multi-thread"] }
|
||||
prost = { version = "0" }
|
||||
nats = { version = "0" }
|
||||
base64 = { version = "0" }
|
||||
|
||||
beteran-protobuf-rust = { git = "https://gitlab.loafle.net/bet/beteran-protobuf-rust.git", tag = "v0.1.20-snapshot" }
|
||||
beteran-common-rust = { git = "https://gitlab.loafle.net/bet/beteran-common-rust.git", tag = "v0.1.0-snapshot" }
|
||||
|
||||
[build-dependencies]
|
1
src/identity/mod.rs
Normal file
1
src/identity/mod.rs
Normal file
|
@ -0,0 +1 @@
|
|||
pub mod service;
|
447
src/identity/service.rs
Normal file
447
src/identity/service.rs
Normal file
|
@ -0,0 +1,447 @@
|
|||
use beteran_common_rust as bcr;
|
||||
use beteran_protobuf_rust as bpr;
|
||||
use prost::Message;
|
||||
|
||||
///
|
||||
pub struct Service {
|
||||
connection_broker: nats::asynk::Connection,
|
||||
queue_broker: String,
|
||||
}
|
||||
|
||||
impl std::fmt::Debug for Service {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||
f.debug_struct("beteran-backend-server-edge::identity::service::Service")
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl Service {
|
||||
///
|
||||
pub fn new(connection_broker: nats::asynk::Connection, queue_broker: String) -> Service {
|
||||
Service {
|
||||
connection_broker,
|
||||
queue_broker,
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn subscribe(&self) -> std::result::Result<(), std::boxed::Box<dyn std::error::Error>> {
|
||||
futures::try_join!(
|
||||
self.check_username_for_duplication(),
|
||||
self.check_nickname_for_duplication(),
|
||||
self.captcha(),
|
||||
self.signin(),
|
||||
)
|
||||
.map(|_| ())
|
||||
}
|
||||
|
||||
fn get_client_in_header(
|
||||
&self,
|
||||
message: &nats::asynk::Message,
|
||||
) -> Result<bpr::models::core::network::Client, bcr::error::rpc::Error> {
|
||||
match &message.headers {
|
||||
Some(headers) => {
|
||||
let client = match headers.get(bpr::c2se::core::network::HEADER_CLIENT) {
|
||||
Some(c) => {
|
||||
let msg = base64::decode(c).map_err(|e| {
|
||||
bcr::error::rpc::Error::Parse(bcr::error::rpc::Parse {
|
||||
message: format!("invalid header: {}", e),
|
||||
})
|
||||
})?;
|
||||
bpr::models::core::network::Client::decode(msg.as_slice()).map_err(|e| {
|
||||
bcr::error::rpc::Error::Parse(bcr::error::rpc::Parse {
|
||||
message: format!("invalid header: {}", e),
|
||||
})
|
||||
})?
|
||||
}
|
||||
None => {
|
||||
return Err(bcr::error::rpc::Error::Parse(bcr::error::rpc::Parse {
|
||||
message: "invalid client information".to_string(),
|
||||
}));
|
||||
}
|
||||
};
|
||||
|
||||
Ok(client)
|
||||
}
|
||||
None => Err(bcr::error::rpc::Error::Parse(bcr::error::rpc::Parse {
|
||||
message: "invalid header".to_string(),
|
||||
})),
|
||||
}
|
||||
}
|
||||
|
||||
async fn check_username_for_duplication(&self) -> Result<(), Box<dyn std::error::Error>> {
|
||||
let s = self
|
||||
.connection_broker
|
||||
.queue_subscribe(
|
||||
bpr::c2se::backend::identity::SUBJECT_CHECK_USERNAME_FOR_DUPLICATION,
|
||||
self.queue_broker.as_str(),
|
||||
)
|
||||
.await?;
|
||||
|
||||
while let Some(message) = s.next().await {
|
||||
if let Err(e) = async {
|
||||
let client = self.get_client_in_header(&message)?;
|
||||
|
||||
let req = bpr::c2se::common::identity::CheckUsernameForDuplicationRequest::decode(
|
||||
message.data.as_slice(),
|
||||
)
|
||||
.map_err(|e| {
|
||||
bcr::error::rpc::Error::InvalidRequest(bcr::error::rpc::InvalidRequest {
|
||||
message: format!("invalid request: {}", e),
|
||||
})
|
||||
})?;
|
||||
|
||||
let ss_check_username_for_duplication_req =
|
||||
bpr::ss::member::identity::CheckUsernameForDuplicationRequest {
|
||||
client: Some(client),
|
||||
username: req.username,
|
||||
};
|
||||
|
||||
let ss_check_username_for_duplication_res_msg = self
|
||||
.connection_broker
|
||||
.request(
|
||||
bpr::ss::member::identity::SUBJECT_CHECK_USERNAME_FOR_DUPLICATION,
|
||||
ss_check_username_for_duplication_req.encode_to_vec(),
|
||||
)
|
||||
.await
|
||||
.map_err(|e| {
|
||||
bcr::error::rpc::Error::Server(bcr::error::rpc::Server {
|
||||
message: format!("server: {}", e),
|
||||
})
|
||||
})?;
|
||||
|
||||
let ss_check_username_for_duplication_res =
|
||||
bpr::ss::member::identity::CheckUsernameForDuplicationResponse::decode(
|
||||
ss_check_username_for_duplication_res_msg.data.as_slice(),
|
||||
)
|
||||
.map_err(|e| {
|
||||
bcr::error::rpc::Error::Server(bcr::error::rpc::Server {
|
||||
message: format!("server: {}", e),
|
||||
})
|
||||
})?;
|
||||
|
||||
if let Some(e) = ss_check_username_for_duplication_res.error {
|
||||
return Err(bcr::error::rpc::Error::Server(bcr::error::rpc::Server {
|
||||
message: format!("server: {}", e),
|
||||
}));
|
||||
}
|
||||
|
||||
if let Some(r) = ss_check_username_for_duplication_res.result {
|
||||
message
|
||||
.respond(
|
||||
bpr::c2se::common::identity::CheckUsernameForDuplicationResponse {
|
||||
error: None,
|
||||
result: Some(
|
||||
bpr::c2se::common::identity::check_username_for_duplication_response::Result {
|
||||
duplicated: r.duplicated,
|
||||
},
|
||||
),
|
||||
}
|
||||
.encode_to_vec(),
|
||||
)
|
||||
.await
|
||||
.map_err(|e| {
|
||||
bcr::error::rpc::Error::Server(bcr::error::rpc::Server {
|
||||
message: format!("server: {}", e),
|
||||
})
|
||||
})?;
|
||||
}
|
||||
|
||||
Ok::<(), bcr::error::rpc::Error>(())
|
||||
}
|
||||
.await
|
||||
{
|
||||
message
|
||||
.respond(
|
||||
bpr::c2se::common::identity::CheckUsernameForDuplicationResponse {
|
||||
error: Some(bpr::protobuf::rpc::Error::from(e)),
|
||||
result: None,
|
||||
}
|
||||
.encode_to_vec(),
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
  /// Subscribes to nickname-duplication checks from edge clients, forwards them
  /// to the ss member identity service, and relays the `duplicated` flag.
  async fn check_nickname_for_duplication(&self) -> Result<(), Box<dyn std::error::Error>> {
    let s = self
      .connection_broker
      .queue_subscribe(
        bpr::c2se::backend::identity::SUBJECT_CHECK_NICKNAME_FOR_DUPLICATION,
        self.queue_broker.as_str(),
      )
      .await?;

    while let Some(message) = s.next().await {
      if let Err(e) = async {
        let client = self.get_client_in_header(&message)?;

        let req = bpr::c2se::common::identity::CheckNicknameForDuplicationRequest::decode(
          message.data.as_slice(),
        )
        .map_err(|e| {
          bcr::error::rpc::Error::InvalidRequest(bcr::error::rpc::InvalidRequest {
            message: format!("invalid request: {}", e),
          })
        })?;

        let ss_check_nickname_for_duplication_req =
          bpr::ss::member::identity::CheckNicknameForDuplicationRequest {
            client: Some(client),
            nickname: req.nickname,
          };

        let ss_check_nickname_for_duplication_res_msg = self
          .connection_broker
          .request(
            bpr::ss::member::identity::SUBJECT_CHECK_NICKNAME_FOR_DUPLICATION,
            ss_check_nickname_for_duplication_req.encode_to_vec(),
          )
          .await
          .map_err(|e| {
            bcr::error::rpc::Error::Server(bcr::error::rpc::Server {
              message: format!("server: {}", e),
            })
          })?;

        let ss_check_nickname_for_duplication_res =
          bpr::ss::member::identity::CheckNicknameForDuplicationResponse::decode(
            ss_check_nickname_for_duplication_res_msg.data.as_slice(),
          )
          .map_err(|e| {
            bcr::error::rpc::Error::Server(bcr::error::rpc::Server {
              message: format!("server: {}", e),
            })
          })?;

        if let Some(e) = ss_check_nickname_for_duplication_res.error {
          return Err(bcr::error::rpc::Error::Server(bcr::error::rpc::Server {
            message: format!("server: {}", e),
          }));
        }

        if let Some(r) = ss_check_nickname_for_duplication_res.result {
          message
            .respond(
              bpr::c2se::common::identity::CheckNicknameForDuplicationResponse {
                error: None,
                result: Some(
                  bpr::c2se::common::identity::check_nickname_for_duplication_response::Result {
                    duplicated: r.duplicated,
                  },
                ),
              }
              .encode_to_vec(),
            )
            .await
            .map_err(|e| {
              bcr::error::rpc::Error::Server(bcr::error::rpc::Server {
                message: format!("server: {}", e),
              })
            })?;
        }

        Ok::<(), bcr::error::rpc::Error>(())
      }
      .await
      {
        message
          .respond(
            bpr::c2se::common::identity::CheckNicknameForDuplicationResponse {
              error: Some(bpr::protobuf::rpc::Error::from(e)),
              result: None,
            }
            .encode_to_vec(),
          )
          .await?;
      }
    }

    Ok(())
  }

  /// Subscribes to captcha requests from edge clients, forwards them to the
  /// ss member identity service, and relays the captcha token and image.
  async fn captcha(&self) -> Result<(), Box<dyn std::error::Error>> {
    let s = self
      .connection_broker
      .queue_subscribe(
        bpr::c2se::backend::identity::SUBJECT_CAPTCHA,
        self.queue_broker.as_str(),
      )
      .await?;

    while let Some(message) = s.next().await {
      if let Err(e) = async {
        let client = self.get_client_in_header(&message)?;

        let _req = bpr::c2se::common::identity::CaptchaRequest::decode(message.data.as_slice())
          .map_err(|e| {
            bcr::error::rpc::Error::InvalidRequest(bcr::error::rpc::InvalidRequest {
              message: format!("invalid request: {}", e),
            })
          })?;

        let ss_captcha_req = bpr::ss::member::identity::CaptchaRequest {
          client: Some(client),
        };

        let ss_captcha_res_msg = self
          .connection_broker
          .request(
            bpr::ss::member::identity::SUBJECT_CAPTCHA,
            ss_captcha_req.encode_to_vec(),
          )
          .await
          .map_err(|e| {
            bcr::error::rpc::Error::Server(bcr::error::rpc::Server {
              message: format!("server: {}", e),
            })
          })?;

        let ss_captcha_res =
          bpr::ss::member::identity::CaptchaResponse::decode(ss_captcha_res_msg.data.as_slice())
            .map_err(|e| {
              bcr::error::rpc::Error::Server(bcr::error::rpc::Server {
                message: format!("server: {}", e),
              })
            })?;

        if let Some(e) = ss_captcha_res.error {
          return Err(bcr::error::rpc::Error::Server(bcr::error::rpc::Server {
            message: format!("server: {}", e),
          }));
        }

        if let Some(r) = ss_captcha_res.result {
          message
            .respond(
              bpr::c2se::common::identity::CaptchaResponse {
                error: None,
                result: Some(bpr::c2se::common::identity::captcha_response::Result {
                  token: r.token,
                  image: r.image,
                }),
              }
              .encode_to_vec(),
            )
            .await
            .map_err(|e| {
              bcr::error::rpc::Error::Server(bcr::error::rpc::Server {
                message: format!("server: {}", e),
              })
            })?;
        }

        Ok::<(), bcr::error::rpc::Error>(())
      }
      .await
      {
        message
          .respond(
            bpr::c2se::common::identity::CaptchaResponse {
              error: Some(bpr::protobuf::rpc::Error::from(e)),
              result: None,
            }
            .encode_to_vec(),
          )
          .await?;
      }
    }

    Ok(())
  }

  /// Subscribes to sign-in requests from edge clients, forwards the credentials
  /// and captcha token to the ss member identity service, and relays the
  /// resulting session id.
  async fn signin(&self) -> Result<(), Box<dyn std::error::Error>> {
    let s = self
      .connection_broker
      .queue_subscribe(
        bpr::c2se::backend::identity::SUBJECT_SIGNIN,
        self.queue_broker.as_str(),
      )
      .await?;

    while let Some(message) = s.next().await {
      if let Err(e) = async {
        let client = self.get_client_in_header(&message)?;

        let req = bpr::c2se::backend::identity::SigninRequest::decode(message.data.as_slice())
          .map_err(|e| {
            bcr::error::rpc::Error::InvalidRequest(bcr::error::rpc::InvalidRequest {
              message: format!("invalid request: {}", e),
            })
          })?;

        let ss_signin_req = bpr::ss::member::identity::SigninRequest {
          client: Some(client),
          token: req.token,
          security_code: req.security_code,
          username: req.username,
          password: req.password,
        };

        let ss_signin_res_msg = self
          .connection_broker
          .request(
            bpr::ss::member::identity::SUBJECT_SIGNIN,
            ss_signin_req.encode_to_vec(),
          )
          .await
          .map_err(|e| {
            bcr::error::rpc::Error::Server(bcr::error::rpc::Server {
              message: format!("server: {}", e),
            })
          })?;

        let ss_signin_res =
          bpr::ss::member::identity::SigninResponse::decode(ss_signin_res_msg.data.as_slice())
            .map_err(|e| {
              bcr::error::rpc::Error::Server(bcr::error::rpc::Server {
                message: format!("server: {}", e),
              })
            })?;

        if let Some(e) = ss_signin_res.error {
          return Err(bcr::error::rpc::Error::Server(bcr::error::rpc::Server {
            message: format!("server: {}", e),
          }));
        }

        if let Some(r) = ss_signin_res.result {
          message
            .respond(
              bpr::c2se::backend::identity::SigninResponse {
                error: None,
                result: Some(bpr::c2se::backend::identity::signin_response::Result {
                  session_id: r.session_id,
                }),
              }
              .encode_to_vec(),
            )
            .await
            .map_err(|e| {
              bcr::error::rpc::Error::Server(bcr::error::rpc::Server {
                message: format!("server: {}", e),
              })
            })?;
        }

        Ok::<(), bcr::error::rpc::Error>(())
      }
      .await
      {
        message
          .respond(
            bpr::c2se::backend::identity::SigninResponse {
              error: Some(bpr::protobuf::rpc::Error::from(e)),
              result: None,
            }
            .encode_to_vec(),
          )
          .await?;
      }
    }

    Ok(())
  }
}
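Each handler above repeats the same relay step: decode the incoming edge request, re-encode a corresponding ss member identity request, send it with connection_broker.request, decode the reply, and map any transport or decode failure to bcr::error::rpc::Error::Server. As an illustration only (not part of this commit), that step could be factored into a generic helper; the sketch assumes the nats::asynk::Connection::request and prost::Message APIs the handlers already use, and the helper name forward is hypothetical.

use prost::Message;

// Hypothetical helper (illustrative, not in this commit): forward an encoded
// server-to-server request over the broker and decode the typed response,
// mapping failures to the same Server error the handlers construct inline.
async fn forward<Req, Res>(
  connection_broker: &nats::asynk::Connection,
  subject: &str,
  req: Req,
) -> Result<Res, bcr::error::rpc::Error>
where
  Req: Message,
  Res: Message + Default,
{
  let msg = connection_broker
    .request(subject, req.encode_to_vec())
    .await
    .map_err(|e| {
      bcr::error::rpc::Error::Server(bcr::error::rpc::Server {
        message: format!("server: {}", e),
      })
    })?;

  Res::decode(msg.data.as_slice()).map_err(|e| {
    bcr::error::rpc::Error::Server(bcr::error::rpc::Server {
      message: format!("server: {}", e),
    })
  })
}

With such a helper, each handler body would shrink to building the ss request and relaying the decoded result, while keeping the same error-propagation behavior.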
26
src/main.rs
Normal file
@@ -0,0 +1,26 @@
use std::env;

mod identity;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
  // Broker URL and queue group name come from the environment; both default
  // to an empty string when unset.
  let url_broker = match env::var_os("URL_BROKER") {
    Some(v) => v.into_string().unwrap(),
    None => "".to_string(),
  };
  let queue_broker = match env::var_os("QUEUE_BROKER") {
    Some(v) => v.into_string().unwrap(),
    None => "".to_string(),
  };

  let broker_opts = nats::asynk::Options::new();
  let connection_broker = broker_opts.connect(url_broker).await?;

  let identity_service = identity::service::Service::new(connection_broker, queue_broker);

  println!("Server edge[beteran-backend-server-edge] is started");

  futures::try_join!(identity_service.subscribe())?;

  Ok(())
}
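main falls back to an empty string when URL_BROKER or QUEUE_BROKER is unset, so a missing variable only surfaces later as a broker connection error. A minimal sketch of a fail-fast alternative is shown below; required_env is a hypothetical helper, and the variable names are the ones read above.

use std::env;

// Hypothetical helper (illustrative, not in this commit): return an error that
// names the missing variable instead of silently defaulting to an empty string.
fn required_env(name: &str) -> Result<String, Box<dyn std::error::Error>> {
  env::var(name).map_err(|_| format!("missing required environment variable: {}", name).into())
}

Used as let url_broker = required_env("URL_BROKER")?; inside main, startup would fail immediately with a descriptive message.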