initialized

This commit is contained in:
병준 박 2022-08-05 04:10:34 +00:00
parent 2977de88ee
commit a3d54879e5
56 changed files with 3549 additions and 0 deletions

39
.devcontainer/Dockerfile Normal file
View File

@@ -0,0 +1,39 @@
# Note: You can use any Debian/Ubuntu based image you want.
FROM mcr.microsoft.com/vscode/devcontainers/base:0-bullseye
# [Option] Install zsh
ARG INSTALL_ZSH="true"
# [Option] Upgrade OS packages to their latest versions
ARG UPGRADE_PACKAGES="false"
# [Option] Enable non-root Docker access in container
ARG ENABLE_NONROOT_DOCKER="true"
# [Option] Use the OSS Moby CLI instead of the licensed Docker CLI
ARG USE_MOBY="true"
# Enable new "BUILDKIT" mode for Docker CLI
ENV DOCKER_BUILDKIT=1
# Install needed packages and setup non-root user. Use a separate RUN statement to add your
# own dependencies. A user of "automatic" attempts to reuse an user ID if one already exists.
ARG USERNAME=automatic
ARG USER_UID=1000
ARG USER_GID=$USER_UID
COPY library-scripts/*.sh /tmp/library-scripts/
RUN apt-get update \
&& /bin/bash /tmp/library-scripts/common-debian.sh "${INSTALL_ZSH}" "${USERNAME}" "${USER_UID}" "${USER_GID}" "${UPGRADE_PACKAGES}" "true" "true" \
# Use Docker script from script library to set things up
&& /bin/bash /tmp/library-scripts/docker-debian.sh "${ENABLE_NONROOT_DOCKER}" "/var/run/docker-host.sock" "/var/run/docker.sock" "${USERNAME}" \
# Clean up
&& apt-get autoremove -y && apt-get clean -y && rm -rf /var/lib/apt/lists/* /tmp/library-scripts/
# Setting the ENTRYPOINT to docker-init.sh will configure non-root access
# to the Docker socket. The script will also execute CMD as needed.
ENTRYPOINT [ "/usr/local/share/docker-init.sh" ]
CMD [ "sleep", "infinity" ]
# [Optional] Uncomment this section to install additional OS packages.
# RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
# && apt-get -y install --no-install-recommends <your-package-list-here>
# Install libpq-dev (presumably for a PostgreSQL client crate — confirm against Cargo.toml).
# Fixed: this layer previously left the apt lists/caches in the image; clean them up the
# same way the first RUN layer does so the layer stays small and no stale lists linger.
RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
&& apt-get -y install --no-install-recommends libpq-dev \
&& apt-get clean -y && rm -rf /var/lib/apt/lists/*

View File

@@ -0,0 +1,52 @@
// For format details, see https://aka.ms/devcontainer.json. For config options, see the README at:
// https://github.com/microsoft/vscode-dev-containers/tree/v0.234.0/containers/docker-from-docker-compose
{
"name": "beteran-server-service",
// Container is built/run via docker-compose; "app" is the service VS Code attaches to.
"dockerComposeFile": "docker-compose.yml",
"service": "app",
"workspaceFolder": "/workspace",
// Configure tool-specific properties.
"customizations": {
// Configure properties specific to VS Code.
"vscode": {
// Set *default* container specific settings.json values on container create.
"settings": {
"lldb.verboseLogging": true,
"lldb.executable": "/usr/bin/lldb",
// Don't let VS Code watch files under ./target (Cargo build output churns constantly)
"files.watcherExclude": {
"**/target/**": true
},
"rust-analyzer.checkOnSave.command": "clippy",
"editor.tabSize": 2,
"editor.insertSpaces": true,
"editor.formatOnSave": true
},
// Add the IDs of extensions you want installed when the container is created.
// NOTE(review): "matklad.rust-analyzer" was later renamed to "rust-lang.rust-analyzer"
// on the marketplace — confirm which ID your VS Code version resolves.
"extensions": [
"donjayamanne.githistory",
"eamodio.gitlens",
"matklad.rust-analyzer",
"mhutchie.git-graph",
"ms-azuretools.vscode-docker",
"mutantdino.resourcemonitor",
"serayuzgur.crates",
"tamasfe.even-better-toml",
"vadimcn.vscode-lldb"
]
}
},
// Use 'forwardPorts' to make a list of ports inside the container available locally.
// "forwardPorts": [],
// Use 'postCreateCommand' to run commands after the container is created.
// "postCreateCommand": "docker --version",
"postCreateCommand": "bash ./.devcontainer/scripts/postCreateCommand.sh",
// Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root.
"remoteUser": "vscode",
// Dev-container "features" layered onto the image: git and the Rust toolchain.
"features": {
"git": "latest",
"rust": "latest"
}
}

View File

@@ -0,0 +1,29 @@
# Compose file for the dev container ("docker-from-docker" pattern: the host's Docker
# socket is forwarded into the container rather than running a nested daemon).
# NOTE(review): indentation was flattened by the commit-view scrape — restore nesting
# (services > app > build/volumes/...) before using this file directly.
version: '3'
services:
app:
build:
context: .
dockerfile: Dockerfile
volumes:
# Forwards the local Docker socket to the container.
- /var/run/docker.sock:/var/run/docker-host.sock
# Update this to wherever you want VS Code to mount the folder of your project
- ..:/workspace:cached
# Overrides default command so things don't shut down after the process ends.
entrypoint: /usr/local/share/docker-init.sh
command: sleep infinity
# Uncomment the next four lines if you will use a ptrace-based debuggers like C++, Go, and Rust.
# cap_add:
# - SYS_PTRACE
# security_opt:
# - seccomp:unconfined
# Uncomment the next line to use a non-root user for all processes.
# user: vscode
# Use "forwardPorts" in **devcontainer.json** to forward an app port locally.
# (Adding the "ports" property to this file will not forward from a Codespace.)

View File

@@ -0,0 +1,454 @@
#!/usr/bin/env bash
#-------------------------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
#-------------------------------------------------------------------------------------------------------------
#
# Docs: https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/common.md
# Maintainer: The VS Code and Codespaces Teams
#
# Syntax: ./common-debian.sh [install zsh flag] [username] [user UID] [user GID] [upgrade packages flag] [install Oh My Zsh! flag] [Add non-free packages]
# Abort on the first failing command.
set -e
# Positional arguments with defaults; "automatic" triggers auto-detection below.
INSTALL_ZSH=${1:-"true"}
USERNAME=${2:-"automatic"}
USER_UID=${3:-"automatic"}
USER_GID=${4:-"automatic"}
UPGRADE_PACKAGES=${5:-"true"}
INSTALL_OH_MYS=${6:-"true"}
ADD_NON_FREE_PACKAGES=${7:-"false"}
# Directory this script lives in (used later to locate meta.env).
SCRIPT_DIR="$(cd $(dirname "${BASH_SOURCE[0]}") && pwd)"
# Marker file records which steps already ran so re-runs are idempotent.
MARKER_FILE="/usr/local/etc/vscode-dev-containers/common"
if [ "$(id -u)" -ne 0 ]; then
echo -e 'Script must be run as root. Use sudo, su, or add "USER root" to your Dockerfile before running this script.'
exit 1
fi
# Ensure that login shells get the correct path if the user updated the PATH using ENV.
rm -f /etc/profile.d/00-restore-env.sh
echo "export PATH=${PATH//$(sh -lc 'echo $PATH')/\$PATH}" > /etc/profile.d/00-restore-env.sh
chmod +x /etc/profile.d/00-restore-env.sh
# If in automatic mode, determine if a user already exists, if not use vscode
if [ "${USERNAME}" = "auto" ] || [ "${USERNAME}" = "automatic" ]; then
USERNAME=""
# Candidates: common devcontainer user names, plus whichever user has UID 1000.
POSSIBLE_USERS=("vscode" "node" "codespace" "$(awk -v val=1000 -F ":" '$3==val{print $1}' /etc/passwd)")
for CURRENT_USER in ${POSSIBLE_USERS[@]}; do
if id -u ${CURRENT_USER} > /dev/null 2>&1; then
USERNAME=${CURRENT_USER}
break
fi
done
if [ "${USERNAME}" = "" ]; then
USERNAME=vscode
fi
elif [ "${USERNAME}" = "none" ]; then
USERNAME=root
USER_UID=0
USER_GID=0
fi
# Load markers to see which steps have already run
if [ -f "${MARKER_FILE}" ]; then
echo "Marker file found:"
cat "${MARKER_FILE}"
# Sourcing sets the *_ALREADY_* flags consulted throughout the script.
source "${MARKER_FILE}"
fi
# Ensure apt is in non-interactive to avoid prompts
export DEBIAN_FRONTEND=noninteractive
# Function to call apt-get if needed
# Runs "apt-get update" only when the package list cache is missing or empty.
apt_get_update_if_needed()
{
if [ ! -d "/var/lib/apt/lists" ] || [ "$(ls /var/lib/apt/lists/ | wc -l)" = "0" ]; then
echo "Running apt-get update..."
apt-get update
else
echo "Skipping apt-get update."
fi
}
# Run install apt-utils to avoid debconf warning then verify presence of other common developer tools and dependencies
if [ "${PACKAGES_ALREADY_INSTALLED}" != "true" ]; then
package_list="apt-utils \
openssh-client \
gnupg2 \
dirmngr \
iproute2 \
procps \
lsof \
htop \
net-tools \
psmisc \
curl \
wget \
rsync \
ca-certificates \
unzip \
zip \
nano \
vim-tiny \
less \
jq \
lsb-release \
apt-transport-https \
dialog \
libc6 \
libgcc1 \
libkrb5-3 \
libgssapi-krb5-2 \
libicu[0-9][0-9] \
liblttng-ust[0-9] \
libstdc++6 \
zlib1g \
locales \
sudo \
ncdu \
man-db \
strace \
manpages \
manpages-dev \
init-system-helpers"
# Needed for adding manpages-posix and manpages-posix-dev which are non-free packages in Debian
if [ "${ADD_NON_FREE_PACKAGES}" = "true" ]; then
# Bring in variables from /etc/os-release like VERSION_CODENAME
. /etc/os-release
sed -i -E "s/deb http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME} main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME} main contrib non-free/" /etc/apt/sources.list
# Fixed: the pattern previously said "httredir" (typo) so it could never match a
# httpredir mirror, and the replacement dropped the "deb-src" prefix, silently
# converting a source entry into a duplicate binary entry.
sed -i -E "s/deb-src http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME} main/deb-src http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME} main contrib non-free/" /etc/apt/sources.list
sed -i -E "s/deb http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME}-updates main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME}-updates main contrib non-free/" /etc/apt/sources.list
# Fixed: replacement previously dropped the "deb-src" prefix here as well.
sed -i -E "s/deb-src http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME}-updates main/deb-src http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME}-updates main contrib non-free/" /etc/apt/sources.list
sed -i "s/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main contrib non-free/" /etc/apt/sources.list
sed -i "s/deb-src http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main/deb-src http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main contrib non-free/" /etc/apt/sources.list
sed -i "s/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main contrib non-free/" /etc/apt/sources.list
sed -i "s/deb-src http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main/deb-src http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main contrib non-free/" /etc/apt/sources.list
# Handle bullseye location for security https://www.debian.org/releases/bullseye/amd64/release-notes/ch-information.en.html
sed -i "s/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main contrib non-free/" /etc/apt/sources.list
sed -i "s/deb-src http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main/deb-src http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main contrib non-free/" /etc/apt/sources.list
echo "Running apt-get update..."
apt-get update
package_list="${package_list} manpages-posix manpages-posix-dev"
else
apt_get_update_if_needed
fi
# Install libssl1.1 if available
if [[ ! -z $(apt-cache --names-only search ^libssl1.1$) ]]; then
package_list="${package_list} libssl1.1"
fi
# Install appropriate version of libssl1.0.x if available
libssl_package=$(dpkg-query -f '${db:Status-Abbrev}\t${binary:Package}\n' -W 'libssl1\.0\.?' 2>&1 || echo '')
# Fixed: this previously inspected the garbled, undefined variable name
# "$LIlibssl_packageBSSL" (always empty), so the installed-count check never saw
# the actual dpkg-query output.
if [ "$(echo "$libssl_package" | grep -o 'libssl1\.0\.[0-9]:' | uniq | sort | wc -l)" -eq 0 ]; then
if [[ ! -z $(apt-cache --names-only search ^libssl1.0.2$) ]]; then
# Debian 9
package_list="${package_list} libssl1.0.2"
elif [[ ! -z $(apt-cache --names-only search ^libssl1.0.0$) ]]; then
# Ubuntu 18.04, 16.04, earlier
package_list="${package_list} libssl1.0.0"
fi
fi
echo "Packages to verify are installed: ${package_list}"
# Filter the harmless debconf warning from stderr while leaving other errors visible.
apt-get -y install --no-install-recommends ${package_list} 2> >( grep -v 'debconf: delaying package configuration, since apt-utils is not installed' >&2 )
# Install git if not already installed (may be more recent than distro version)
if ! type git > /dev/null 2>&1; then
apt-get -y install --no-install-recommends git
fi
PACKAGES_ALREADY_INSTALLED="true"
fi
# Get to latest versions of all packages
if [ "${UPGRADE_PACKAGES}" = "true" ]; then
apt_get_update_if_needed
apt-get -y upgrade --no-install-recommends
apt-get autoremove -y
fi
# Ensure at least the en_US.UTF-8 UTF-8 locale is available.
# Common need for both applications and things like the agnoster ZSH theme.
if [ "${LOCALE_ALREADY_SET}" != "true" ] && ! grep -o -E '^\s*en_US.UTF-8\s+UTF-8' /etc/locale.gen > /dev/null; then
echo "en_US.UTF-8 UTF-8" >> /etc/locale.gen
locale-gen
LOCALE_ALREADY_SET="true"
fi
# Create or update a non-root user to match UID/GID.
group_name="${USERNAME}"
if id -u ${USERNAME} > /dev/null 2>&1; then
# User exists, update if needed
if [ "${USER_GID}" != "automatic" ] && [ "$USER_GID" != "$(id -g $USERNAME)" ]; then
group_name="$(id -gn $USERNAME)"
groupmod --gid $USER_GID ${group_name}
usermod --gid $USER_GID $USERNAME
fi
if [ "${USER_UID}" != "automatic" ] && [ "$USER_UID" != "$(id -u $USERNAME)" ]; then
usermod --uid $USER_UID $USERNAME
fi
else
# Create user
if [ "${USER_GID}" = "automatic" ]; then
groupadd $USERNAME
else
groupadd --gid $USER_GID $USERNAME
fi
if [ "${USER_UID}" = "automatic" ]; then
useradd -s /bin/bash --gid $USERNAME -m $USERNAME
else
useradd -s /bin/bash --uid $USER_UID --gid $USERNAME -m $USERNAME
fi
fi
# Add add sudo support for non-root user
if [ "${USERNAME}" != "root" ] && [ "${EXISTING_NON_ROOT_USER}" != "${USERNAME}" ]; then
# Passwordless sudo for the dev user; 0440 is the mode sudoers requires.
echo $USERNAME ALL=\(root\) NOPASSWD:ALL > /etc/sudoers.d/$USERNAME
chmod 0440 /etc/sudoers.d/$USERNAME
EXISTING_NON_ROOT_USER="${USERNAME}"
fi
# ** Shell customization section **
# Pick the home directory whose rc files the rest of the script customizes.
if [ "${USERNAME}" = "root" ]; then
user_rc_path="/root"
else
user_rc_path="/home/${USERNAME}"
fi
# Restore user .bashrc defaults from skeleton file if it doesn't exist or is empty
if [ ! -f "${user_rc_path}/.bashrc" ] || [ ! -s "${user_rc_path}/.bashrc" ] ; then
cp /etc/skel/.bashrc "${user_rc_path}/.bashrc"
fi
# Restore user .profile defaults from skeleton file if it doesn't exist or is empty
if [ ! -f "${user_rc_path}/.profile" ] || [ ! -s "${user_rc_path}/.profile" ] ; then
cp /etc/skel/.profile "${user_rc_path}/.profile"
fi
# .bashrc/.zshrc snippet
# Everything between the quoted EOF markers is written verbatim into the users'
# shell rc files later on — do not add comments inside the heredoc, they would
# end up in the generated rc files.
rc_snippet="$(cat << 'EOF'
if [ -z "${USER}" ]; then export USER=$(whoami); fi
if [[ "${PATH}" != *"$HOME/.local/bin"* ]]; then export PATH="${PATH}:$HOME/.local/bin"; fi
# Display optional first run image specific notice if configured and terminal is interactive
if [ -t 1 ] && [[ "${TERM_PROGRAM}" = "vscode" || "${TERM_PROGRAM}" = "codespaces" ]] && [ ! -f "$HOME/.config/vscode-dev-containers/first-run-notice-already-displayed" ]; then
if [ -f "/usr/local/etc/vscode-dev-containers/first-run-notice.txt" ]; then
cat "/usr/local/etc/vscode-dev-containers/first-run-notice.txt"
elif [ -f "/workspaces/.codespaces/shared/first-run-notice.txt" ]; then
cat "/workspaces/.codespaces/shared/first-run-notice.txt"
fi
mkdir -p "$HOME/.config/vscode-dev-containers"
# Mark first run notice as displayed after 10s to avoid problems with fast terminal refreshes hiding it
((sleep 10s; touch "$HOME/.config/vscode-dev-containers/first-run-notice-already-displayed") &)
fi
# Set the default git editor if not already set
if [ -z "$(git config --get core.editor)" ] && [ -z "${GIT_EDITOR}" ]; then
if [ "${TERM_PROGRAM}" = "vscode" ]; then
if [[ -n $(command -v code-insiders) && -z $(command -v code) ]]; then
export GIT_EDITOR="code-insiders --wait"
else
export GIT_EDITOR="code --wait"
fi
fi
fi
EOF
)"
# code shim, it fallbacks to code-insiders if code is not available
# (heredoc content becomes /usr/local/bin/code verbatim)
cat << 'EOF' > /usr/local/bin/code
#!/bin/sh
get_in_path_except_current() {
which -a "$1" | grep -A1 "$0" | grep -v "$0"
}
code="$(get_in_path_except_current code)"
if [ -n "$code" ]; then
exec "$code" "$@"
elif [ "$(command -v code-insiders)" ]; then
exec code-insiders "$@"
else
echo "code or code-insiders is not installed" >&2
exit 127
fi
EOF
chmod +x /usr/local/bin/code
# systemctl shim - tells people to use 'service' if systemd is not running
# Fixed: when systemd IS running, the shim previously exec'd the nonexistent path
# "/bin/systemctl/systemctl" and would always fail; forward to the real binary at
# /bin/systemctl instead (this shim itself lives at /usr/local/bin, so no recursion).
cat << 'EOF' > /usr/local/bin/systemctl
#!/bin/sh
set -e
if [ -d "/run/systemd/system" ]; then
exec /bin/systemctl "$@"
else
echo '\n"systemd" is not running in this container due to its overhead.\nUse the "service" command to start services instead. e.g.: \n\nservice --status-all'
fi
EOF
chmod +x /usr/local/bin/systemctl
# Codespaces bash and OMZ themes - partly inspired by https://github.com/ohmyzsh/ohmyzsh/blob/master/themes/robbyrussell.zsh-theme
# Bash prompt theme; the quoted EOF keeps everything literal — it is appended to
# .bashrc files verbatim later, so do not add comments inside the heredoc.
codespaces_bash="$(cat \
<<'EOF'
# Codespaces bash prompt theme
__bash_prompt() {
local userpart='`export XIT=$? \
&& [ ! -z "${GITHUB_USER}" ] && echo -n "\[\033[0;32m\]@${GITHUB_USER} " || echo -n "\[\033[0;32m\]\u " \
&& [ "$XIT" -ne "0" ] && echo -n "\[\033[1;31m\]➜" || echo -n "\[\033[0m\]➜"`'
local gitbranch='`\
if [ "$(git config --get codespaces-theme.hide-status 2>/dev/null)" != 1 ]; then \
export BRANCH=$(git symbolic-ref --short HEAD 2>/dev/null || git rev-parse --short HEAD 2>/dev/null); \
if [ "${BRANCH}" != "" ]; then \
echo -n "\[\033[0;36m\](\[\033[1;31m\]${BRANCH}" \
&& if git ls-files --error-unmatch -m --directory --no-empty-directory -o --exclude-standard ":/*" > /dev/null 2>&1; then \
echo -n " \[\033[1;33m\]✗"; \
fi \
&& echo -n "\[\033[0;36m\]) "; \
fi; \
fi`'
local lightblue='\[\033[1;34m\]'
local removecolor='\[\033[0m\]'
PS1="${userpart} ${lightblue}\w ${gitbranch}${removecolor}\$ "
unset -f __bash_prompt
}
__bash_prompt
EOF
)"
# Zsh prompt theme, installed later as an Oh My Zsh! custom theme file.
codespaces_zsh="$(cat \
<<'EOF'
# Codespaces zsh prompt theme
__zsh_prompt() {
local prompt_username
if [ ! -z "${GITHUB_USER}" ]; then
prompt_username="@${GITHUB_USER}"
else
prompt_username="%n"
fi
PROMPT="%{$fg[green]%}${prompt_username} %(?:%{$reset_color%}➜ :%{$fg_bold[red]%}➜ )" # User/exit code arrow
PROMPT+='%{$fg_bold[blue]%}%(5~|%-1~/…/%3~|%4~)%{$reset_color%} ' # cwd
PROMPT+='$([ "$(git config --get codespaces-theme.hide-status 2>/dev/null)" != 1 ] && git_prompt_info)' # Git status
PROMPT+='%{$fg[white]%}$ %{$reset_color%}'
unset -f __zsh_prompt
}
ZSH_THEME_GIT_PROMPT_PREFIX="%{$fg_bold[cyan]%}(%{$fg_bold[red]%}"
ZSH_THEME_GIT_PROMPT_SUFFIX="%{$reset_color%} "
ZSH_THEME_GIT_PROMPT_DIRTY=" %{$fg_bold[yellow]%}✗%{$fg_bold[cyan]%})"
ZSH_THEME_GIT_PROMPT_CLEAN="%{$fg_bold[cyan]%})"
__zsh_prompt
EOF
)"
# Add RC snippet and custom bash prompt
if [ "${RC_SNIPPET_ALREADY_ADDED}" != "true" ]; then
echo "${rc_snippet}" >> /etc/bash.bashrc
echo "${codespaces_bash}" >> "${user_rc_path}/.bashrc"
echo 'export PROMPT_DIRTRIM=4' >> "${user_rc_path}/.bashrc"
if [ "${USERNAME}" != "root" ]; then
# Give root the same prompt so "sudo -s" sessions look consistent.
echo "${codespaces_bash}" >> "/root/.bashrc"
echo 'export PROMPT_DIRTRIM=4' >> "/root/.bashrc"
fi
chown ${USERNAME}:${group_name} "${user_rc_path}/.bashrc"
RC_SNIPPET_ALREADY_ADDED="true"
fi
# Optionally install and configure zsh and Oh My Zsh!
if [ "${INSTALL_ZSH}" = "true" ]; then
if ! type zsh > /dev/null 2>&1; then
apt_get_update_if_needed
apt-get install -y zsh
fi
if [ "${ZSH_ALREADY_INSTALLED}" != "true" ]; then
echo "${rc_snippet}" >> /etc/zsh/zshrc
ZSH_ALREADY_INSTALLED="true"
fi
# Adapted, simplified inline Oh My Zsh! install steps that adds, defaults to a codespaces theme.
# See https://github.com/ohmyzsh/ohmyzsh/blob/master/tools/install.sh for official script.
oh_my_install_dir="${user_rc_path}/.oh-my-zsh"
if [ ! -d "${oh_my_install_dir}" ] && [ "${INSTALL_OH_MYS}" = "true" ]; then
template_path="${oh_my_install_dir}/templates/zshrc.zsh-template"
user_rc_file="${user_rc_path}/.zshrc"
umask g-w,o-w
mkdir -p ${oh_my_install_dir}
# The fsck overrides tolerate historical zero-padded file modes in the OMZ repo.
git clone --depth=1 \
-c core.eol=lf \
-c core.autocrlf=false \
-c fsck.zeroPaddedFilemode=ignore \
-c fetch.fsck.zeroPaddedFilemode=ignore \
-c receive.fsck.zeroPaddedFilemode=ignore \
"https://github.com/ohmyzsh/ohmyzsh" "${oh_my_install_dir}" 2>&1
echo -e "$(cat "${template_path}")\nDISABLE_AUTO_UPDATE=true\nDISABLE_UPDATE_PROMPT=true" > ${user_rc_file}
sed -i -e 's/ZSH_THEME=.*/ZSH_THEME="codespaces"/g' ${user_rc_file}
mkdir -p ${oh_my_install_dir}/custom/themes
echo "${codespaces_zsh}" > "${oh_my_install_dir}/custom/themes/codespaces.zsh-theme"
# Shrink git while still enabling updates
cd "${oh_my_install_dir}"
git repack -a -d -f --depth=1 --window=1
# Copy to non-root user if one is specified
if [ "${USERNAME}" != "root" ]; then
cp -rf "${user_rc_file}" "${oh_my_install_dir}" /root
chown -R ${USERNAME}:${group_name} "${user_rc_path}"
fi
fi
fi
# Persist image metadata info, script if meta.env found in same directory
# The heredoc body becomes the /usr/local/bin/devcontainer-info helper verbatim —
# do not add comments inside it beyond the ones already part of the generated script.
meta_info_script="$(cat << 'EOF'
#!/bin/sh
. /usr/local/etc/vscode-dev-containers/meta.env
# Minimal output
if [ "$1" = "version" ] || [ "$1" = "image-version" ]; then
echo "${VERSION}"
exit 0
elif [ "$1" = "release" ]; then
echo "${GIT_REPOSITORY_RELEASE}"
exit 0
elif [ "$1" = "content" ] || [ "$1" = "content-url" ] || [ "$1" = "contents" ] || [ "$1" = "contents-url" ]; then
echo "${CONTENTS_URL}"
exit 0
fi
#Full output
echo
echo "Development container image information"
echo
if [ ! -z "${VERSION}" ]; then echo "- Image version: ${VERSION}"; fi
if [ ! -z "${DEFINITION_ID}" ]; then echo "- Definition ID: ${DEFINITION_ID}"; fi
if [ ! -z "${VARIANT}" ]; then echo "- Variant: ${VARIANT}"; fi
if [ ! -z "${GIT_REPOSITORY}" ]; then echo "- Source code repository: ${GIT_REPOSITORY}"; fi
if [ ! -z "${GIT_REPOSITORY_RELEASE}" ]; then echo "- Source code release/branch: ${GIT_REPOSITORY_RELEASE}"; fi
if [ ! -z "${BUILD_TIMESTAMP}" ]; then echo "- Timestamp: ${BUILD_TIMESTAMP}"; fi
if [ ! -z "${CONTENTS_URL}" ]; then echo && echo "More info: ${CONTENTS_URL}"; fi
echo
EOF
)"
if [ -f "${SCRIPT_DIR}/meta.env" ]; then
mkdir -p /usr/local/etc/vscode-dev-containers/
cp -f "${SCRIPT_DIR}/meta.env" /usr/local/etc/vscode-dev-containers/meta.env
echo "${meta_info_script}" > /usr/local/bin/devcontainer-info
chmod +x /usr/local/bin/devcontainer-info
fi
# Write marker file
# Records completed steps so a re-run of this script skips them (idempotency).
mkdir -p "$(dirname "${MARKER_FILE}")"
echo -e "\
PACKAGES_ALREADY_INSTALLED=${PACKAGES_ALREADY_INSTALLED}\n\
LOCALE_ALREADY_SET=${LOCALE_ALREADY_SET}\n\
EXISTING_NON_ROOT_USER=${EXISTING_NON_ROOT_USER}\n\
RC_SNIPPET_ALREADY_ADDED=${RC_SNIPPET_ALREADY_ADDED}\n\
ZSH_ALREADY_INSTALLED=${ZSH_ALREADY_INSTALLED}" > "${MARKER_FILE}"
echo "Done!"

View File

@@ -0,0 +1,354 @@
#!/usr/bin/env bash
#-------------------------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
#-------------------------------------------------------------------------------------------------------------
#
# Docs: https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/docker.md
# Maintainer: The VS Code and Codespaces Teams
#
# Syntax: ./docker-debian.sh [enable non-root docker socket access flag] [source socket] [target socket] [non-root user] [use moby] [CLI version] [Major version for docker-compose]
# Positional arguments with defaults; "automatic" triggers user auto-detection below.
ENABLE_NONROOT_DOCKER=${1:-"true"}
SOURCE_SOCKET=${2:-"/var/run/docker-host.sock"}
TARGET_SOCKET=${3:-"/var/run/docker.sock"}
USERNAME=${4:-"automatic"}
USE_MOBY=${5:-"true"}
DOCKER_VERSION=${6:-"latest"}
DOCKER_DASH_COMPOSE_VERSION=${7:-"v1"} # v1 or v2
MICROSOFT_GPG_KEYS_URI="https://packages.microsoft.com/keys/microsoft.asc"
# Distro codenames each package source supports (may be overridden remotely below).
DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES="buster bullseye bionic focal jammy"
DOCKER_LICENSED_ARCHIVE_VERSION_CODENAMES="buster bullseye bionic focal hirsute impish jammy"
set -e
if [ "$(id -u)" -ne 0 ]; then
echo -e 'Script must be run as root. Use sudo, su, or add "USER root" to your Dockerfile before running this script.'
exit 1
fi
# Determine the appropriate non-root user
if [ "${USERNAME}" = "auto" ] || [ "${USERNAME}" = "automatic" ]; then
USERNAME=""
# Candidates: common devcontainer user names, plus whichever user has UID 1000.
POSSIBLE_USERS=("vscode" "node" "codespace" "$(awk -v val=1000 -F ":" '$3==val{print $1}' /etc/passwd)")
for CURRENT_USER in ${POSSIBLE_USERS[@]}; do
if id -u ${CURRENT_USER} > /dev/null 2>&1; then
USERNAME=${CURRENT_USER}
break
fi
done
if [ "${USERNAME}" = "" ]; then
USERNAME=root
fi
elif [ "${USERNAME}" = "none" ] || ! id -u ${USERNAME} > /dev/null 2>&1; then
USERNAME=root
fi
# Get central common setting
# Downloads a shared settings file once per run and, if present, overrides the
# variable NAMED by $1 from it; $2="true" enables multi-line (-z) matching.
get_common_setting() {
if [ "${common_settings_file_loaded}" != "true" ]; then
curl -sfL "https://aka.ms/vscode-dev-containers/script-library/settings.env" 2>/dev/null -o /tmp/vsdc-settings.env || echo "Could not download settings file. Skipping."
common_settings_file_loaded=true
fi
if [ -f "/tmp/vsdc-settings.env" ]; then
local multi_line=""
if [ "$2" = "true" ]; then multi_line="-z"; fi
local result="$(grep ${multi_line} -oP "$1=\"?\K[^\"]+" /tmp/vsdc-settings.env | tr -d '\0')"
if [ ! -z "${result}" ]; then declare -g $1="${result}"; fi
fi
echo "$1=${!1}"
}
# Run "apt-get update" only when the package list cache is missing or empty;
# otherwise report that the refresh was skipped.
apt_get_update_if_needed()
{
    if [ -d "/var/lib/apt/lists" ] && [ "$(ls /var/lib/apt/lists/ | wc -l)" != "0" ]; then
        echo "Skipping apt-get update."
    else
        echo "Running apt-get update..."
        apt-get update
    fi
}
# Install the given packages unless every one of them is already present.
check_packages() {
    if dpkg -s "$@" > /dev/null 2>&1; then
        return
    fi
    apt_get_update_if_needed
    apt-get -y install --no-install-recommends "$@"
}
# Figure out correct version of a three part version number is not passed
# Resolves the variable NAMED by $1 from a partial or "latest"/"current"/"lts"
# request to a concrete version found among the git tags of repo $2, using tag
# prefix $3 and separator $4; $5="true" makes the last version part optional.
# Exits non-zero when no matching tag exists.
find_version_from_git_tags() {
local variable_name=$1
local requested_version=${!variable_name}
if [ "${requested_version}" = "none" ]; then return; fi
local repository=$2
local prefix=${3:-"tags/v"}
local separator=${4:-"."}
local last_part_optional=${5:-"false"}
# NOTE(review): the unescaped "." below matches ANY character, so this counts
# the string LENGTH (skip when exactly 2 chars), not the number of dots —
# confirm intent; upstream behaves the same way.
if [ "$(echo "${requested_version}" | grep -o "." | wc -l)" != "2" ]; then
local escaped_separator=${separator//./\\.}
local last_part
if [ "${last_part_optional}" = "true" ]; then
last_part="(${escaped_separator}[0-9]+)?"
else
last_part="${escaped_separator}[0-9]+"
fi
local regex="${prefix}\\K[0-9]+${escaped_separator}[0-9]+${last_part}$"
# List matching remote tags, newest first.
local version_list="$(git ls-remote --tags ${repository} | grep -oP "${regex}" | tr -d ' ' | tr "${separator}" "." | sort -rV)"
if [ "${requested_version}" = "latest" ] || [ "${requested_version}" = "current" ] || [ "${requested_version}" = "lts" ]; then
declare -g ${variable_name}="$(echo "${version_list}" | head -n 1)"
else
set +e
declare -g ${variable_name}="$(echo "${version_list}" | grep -E -m 1 "^${requested_version//./\\.}([\\.\\s]|$)")"
set -e
fi
fi
if [ -z "${!variable_name}" ] || ! echo "${version_list}" | grep "^${!variable_name//./\\.}$" > /dev/null 2>&1; then
echo -e "Invalid ${variable_name} value: ${requested_version}\nValid values:\n${version_list}" >&2
exit 1
fi
echo "${variable_name}=${!variable_name}"
}
# Ensure apt is in non-interactive to avoid prompts
export DEBIAN_FRONTEND=noninteractive
# Install dependencies
check_packages apt-transport-https curl ca-certificates gnupg2 dirmngr
if ! type git > /dev/null 2>&1; then
apt_get_update_if_needed
apt-get -y install git
fi
# Source /etc/os-release to get OS info
. /etc/os-release
# Fetch host/container arch.
architecture="$(dpkg --print-architecture)"
# Check if distro is supported
# NOTE(review): "err" is not defined in the visible portion of this script — if it
# is not defined further down, these failure paths crash with "command not found"
# before printing their message; confirm against the full file.
if [ "${USE_MOBY}" = "true" ]; then
# 'get_common_setting' allows attribute to be updated remotely
get_common_setting DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES
if [[ "${DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES}" != *"${VERSION_CODENAME}"* ]]; then
err "Unsupported distribution version '${VERSION_CODENAME}'. To resolve, either: (1) set feature option '\"moby\": false' , or (2) choose a compatible OS distribution"
err "Support distributions include: ${DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES}"
exit 1
fi
echo "Distro codename '${VERSION_CODENAME}' matched filter '${DOCKER_MOBY_ARCHIVE_VERSION_CODENAMES}'"
else
get_common_setting DOCKER_LICENSED_ARCHIVE_VERSION_CODENAMES
if [[ "${DOCKER_LICENSED_ARCHIVE_VERSION_CODENAMES}" != *"${VERSION_CODENAME}"* ]]; then
err "Unsupported distribution version '${VERSION_CODENAME}'. To resolve, please choose a compatible OS distribution"
err "Support distributions include: ${DOCKER_LICENSED_ARCHIVE_VERSION_CODENAMES}"
exit 1
fi
echo "Distro codename '${VERSION_CODENAME}' matched filter '${DOCKER_LICENSED_ARCHIVE_VERSION_CODENAMES}'"
fi
# Set up the necessary apt repos (either Microsoft's or Docker's)
if [ "${USE_MOBY}" = "true" ]; then
cli_package_name="moby-cli"
# Import key safely and import Microsoft apt repo
get_common_setting MICROSOFT_GPG_KEYS_URI
curl -sSL ${MICROSOFT_GPG_KEYS_URI} | gpg --dearmor > /usr/share/keyrings/microsoft-archive-keyring.gpg
echo "deb [arch=${architecture} signed-by=/usr/share/keyrings/microsoft-archive-keyring.gpg] https://packages.microsoft.com/repos/microsoft-${ID}-${VERSION_CODENAME}-prod ${VERSION_CODENAME} main" > /etc/apt/sources.list.d/microsoft.list
else
# Name of proprietary engine package
cli_package_name="docker-ce-cli"
# Import key safely and import Docker apt repo
curl -fsSL https://download.docker.com/linux/${ID}/gpg | gpg --dearmor > /usr/share/keyrings/docker-archive-keyring.gpg
echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/${ID} ${VERSION_CODENAME} stable" > /etc/apt/sources.list.d/docker.list
fi
# Soft version matching for CLI
if [ "${DOCKER_VERSION}" = "latest" ] || [ "${DOCKER_VERSION}" = "lts" ] || [ "${DOCKER_VERSION}" = "stable" ]; then
# Empty, meaning grab whatever "latest" is in apt repo
cli_version_suffix=""
else
# Fetch a valid version from the apt-cache (eg: the Microsoft repo appends +azure, breakfix, etc...)
docker_version_dot_escaped="${DOCKER_VERSION//./\\.}"
docker_version_dot_plus_escaped="${docker_version_dot_escaped//+/\\+}"
# Regex needs to handle debian package version number format: https://www.systutorials.com/docs/linux/man/5-deb-version/
docker_version_regex="^(.+:)?${docker_version_dot_plus_escaped}([\\.\\+ ~:-]|$)"
set +e # Don't exit if finding version fails - will handle gracefully
cli_version_suffix="=$(apt-cache madison ${cli_package_name} | awk -F"|" '{print $2}' | sed -e 's/^[ \t]*//' | grep -E -m 1 "${docker_version_regex}")"
set -e
if [ -z "${cli_version_suffix}" ] || [ "${cli_version_suffix}" = "=" ]; then
echo "(!) No full or partial Docker / Moby version match found for \"${DOCKER_VERSION}\" on OS ${ID} ${VERSION_CODENAME} (${architecture}). Available versions:"
apt-cache madison ${cli_package_name} | awk -F"|" '{print $2}' | grep -oP '^(.+:)?\K.+'
exit 1
fi
echo "cli_version_suffix ${cli_version_suffix}"
fi
# Install Docker / Moby CLI if not already installed
if type docker > /dev/null 2>&1; then
echo "Docker / Moby CLI already installed."
else
if [ "${USE_MOBY}" = "true" ]; then
apt-get -y install --no-install-recommends moby-cli${cli_version_suffix} moby-buildx
# moby-compose is best-effort: it is not packaged for every distro/arch combo.
apt-get -y install --no-install-recommends moby-compose || echo "(*) Package moby-compose (Docker Compose v2) not available for OS ${ID} ${VERSION_CODENAME} (${architecture}). Skipping."
else
apt-get -y install --no-install-recommends docker-ce-cli${cli_version_suffix}
fi
fi
# Install Docker Compose if not already installed and is on a supported architecture
if type docker-compose > /dev/null 2>&1; then
echo "Docker Compose already installed."
else
TARGET_COMPOSE_ARCH="$(uname -m)"
if [ "${TARGET_COMPOSE_ARCH}" = "amd64" ]; then
TARGET_COMPOSE_ARCH="x86_64"
fi
if [ "${TARGET_COMPOSE_ARCH}" != "x86_64" ]; then
# Use pip to get a version that runs on this architecture
if ! dpkg -s python3-minimal python3-pip libffi-dev python3-venv > /dev/null 2>&1; then
apt_get_update_if_needed
apt-get -y install python3-minimal python3-pip libffi-dev python3-venv
fi
export PIPX_HOME=/usr/local/pipx
mkdir -p ${PIPX_HOME}
export PIPX_BIN_DIR=/usr/local/bin
export PYTHONUSERBASE=/tmp/pip-tmp
export PIP_CACHE_DIR=/tmp/pip-tmp/cache
pipx_bin=pipx
if ! type pipx > /dev/null 2>&1; then
pip3 install --disable-pip-version-check --no-cache-dir --user pipx
pipx_bin=/tmp/pip-tmp/bin/pipx
fi
${pipx_bin} install --pip-args '--no-cache-dir --force-reinstall' docker-compose
rm -rf /tmp/pip-tmp
else
# x86_64: download the official v1 static binary; resolve "1" to the newest 1.x tag.
compose_v1_version="1"
find_version_from_git_tags compose_v1_version "https://github.com/docker/compose" "tags/"
echo "(*) Installing docker-compose ${compose_v1_version}..."
curl -fsSL "https://github.com/docker/compose/releases/download/${compose_v1_version}/docker-compose-Linux-x86_64" -o /usr/local/bin/docker-compose
chmod +x /usr/local/bin/docker-compose
fi
fi
# Install docker-compose switch if not already installed - https://github.com/docker/compose-switch#manual-installation
current_v1_compose_path="$(which docker-compose)"
target_v1_compose_path="$(dirname "${current_v1_compose_path}")/docker-compose-v1"
if ! type compose-switch > /dev/null 2>&1; then
echo "(*) Installing compose-switch..."
compose_switch_version="latest"
find_version_from_git_tags compose_switch_version "https://github.com/docker/compose-switch"
curl -fsSL "https://github.com/docker/compose-switch/releases/download/v${compose_switch_version}/docker-compose-linux-${architecture}" -o /usr/local/bin/compose-switch
chmod +x /usr/local/bin/compose-switch
# TODO: Verify checksum once available: https://github.com/docker/compose-switch/issues/11
# Setup v1 CLI as alternative in addition to compose-switch (which maps to v2)
mv "${current_v1_compose_path}" "${target_v1_compose_path}"
update-alternatives --install /usr/local/bin/docker-compose docker-compose /usr/local/bin/compose-switch 99
update-alternatives --install /usr/local/bin/docker-compose docker-compose "${target_v1_compose_path}" 1
fi
# DOCKER_DASH_COMPOSE_VERSION chooses which alternative `docker-compose` resolves to.
if [ "${DOCKER_DASH_COMPOSE_VERSION}" = "v1" ]; then
update-alternatives --set docker-compose "${target_v1_compose_path}"
else
update-alternatives --set docker-compose /usr/local/bin/compose-switch
fi
# If init file already exists, exit
if [ -f "/usr/local/share/docker-init.sh" ]; then
exit 0
fi
echo "docker-init doesnt exist, adding..."
# By default, make the source and target sockets the same
if [ "${SOURCE_SOCKET}" != "${TARGET_SOCKET}" ]; then
touch "${SOURCE_SOCKET}"
ln -s "${SOURCE_SOCKET}" "${TARGET_SOCKET}"
fi
# Add a stub if not adding non-root user access, user is root
if [ "${ENABLE_NONROOT_DOCKER}" = "false" ] || [ "${USERNAME}" = "root" ]; then
echo -e '#!/usr/bin/env bash\nexec "$@"' > /usr/local/share/docker-init.sh
chmod +x /usr/local/share/docker-init.sh
exit 0
fi
# Setup a docker group in the event the docker socket's group is not root
if ! grep -qE '^docker:' /etc/group; then
groupadd --system docker
fi
usermod -aG docker "${USERNAME}"
# Capture the docker group's GID now; it is baked into the generated init
# script below (unescaped expansion inside the heredoc).
DOCKER_GID="$(grep -oP '^docker:x:\K[^:]+' /etc/group)"
# If enabling non-root access and specified user is found, setup socat and add script
chown -h "${USERNAME}":root "${TARGET_SOCKET}"
if ! dpkg -s socat > /dev/null 2>&1; then
apt_get_update_if_needed
apt-get -y install socat
fi
# Generate the runtime init script. The heredoc delimiter is unquoted, so
# ${VAR} expands NOW (build time) while \$ escapes survive to run time.
tee /usr/local/share/docker-init.sh > /dev/null \
<< EOF
#!/usr/bin/env bash
#-------------------------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
#-------------------------------------------------------------------------------------------------------------
set -e
SOCAT_PATH_BASE=/tmp/vscr-docker-from-docker
SOCAT_LOG=\${SOCAT_PATH_BASE}.log
SOCAT_PID=\${SOCAT_PATH_BASE}.pid
# Wrapper function to only use sudo if not already root
sudoIf()
{
if [ "\$(id -u)" -ne 0 ]; then
sudo "\$@"
else
"\$@"
fi
}
# Log messages
log()
{
echo -e "[\$(date)] \$@" | sudoIf tee -a \${SOCAT_LOG} > /dev/null
}
echo -e "\n** \$(date) **" | sudoIf tee -a \${SOCAT_LOG} > /dev/null
log "Ensuring ${USERNAME} has access to ${SOURCE_SOCKET} via ${TARGET_SOCKET}"
# If enabled, try to update the docker group with the right GID. If the group is root,
# fall back on using socat to forward the docker socket to another unix socket so
# that we can set permissions on it without affecting the host.
if [ "${ENABLE_NONROOT_DOCKER}" = "true" ] && [ "${SOURCE_SOCKET}" != "${TARGET_SOCKET}" ] && [ "${USERNAME}" != "root" ] && [ "${USERNAME}" != "0" ]; then
SOCKET_GID=\$(stat -c '%g' ${SOURCE_SOCKET})
if [ "\${SOCKET_GID}" != "0" ] && [ "\${SOCKET_GID}" != "${DOCKER_GID}" ] && ! grep -E ".+:x:\${SOCKET_GID}" /etc/group; then
sudoIf groupmod --gid "\${SOCKET_GID}" docker
else
# Enable proxy if not already running
if [ ! -f "\${SOCAT_PID}" ] || ! ps -p \$(cat \${SOCAT_PID}) > /dev/null; then
log "Enabling socket proxy."
log "Proxying ${SOURCE_SOCKET} to ${TARGET_SOCKET} for vscode"
sudoIf rm -rf ${TARGET_SOCKET}
(sudoIf socat UNIX-LISTEN:${TARGET_SOCKET},fork,mode=660,user=${USERNAME} UNIX-CONNECT:${SOURCE_SOCKET} 2>&1 | sudoIf tee -a \${SOCAT_LOG} > /dev/null & echo "\$!" | sudoIf tee \${SOCAT_PID} > /dev/null)
else
log "Socket proxy already running."
fi
fi
log "Success"
fi
# Execute whatever commands were passed in (if any). This allows us
# to set this script to ENTRYPOINT while still executing the default CMD.
set +e
exec "\$@"
EOF
chmod +x /usr/local/share/docker-init.sh
# Owned by the dev user so it can be tweaked from inside the container.
chown ${USERNAME}:root /usr/local/share/docker-init.sh
echo "Done!"

View File

@ -0,0 +1,5 @@
[toolchain]
channel = "stable"
profile = "minimal"
components = ["clippy", "rustfmt"]
targets = []

View File

@ -0,0 +1,4 @@
#!/usr/bin/env bash
set -e

7
.gitignore vendored Normal file
View File

@ -0,0 +1,7 @@
.DS_Store
/build
# Added by cargo
/target
Cargo.lock

75
.rustfmt.toml Normal file
View File

@ -0,0 +1,75 @@
# https://rust-lang.github.io/rustfmt/?version=v1.4.38&search=
array_width = 60
attr_fn_like_width = 70
binop_separator = "Front" # "Front", "Back"
blank_lines_lower_bound = 0
blank_lines_upper_bound = 1
brace_style = "SameLineWhere" #"AlwaysNextLine", "PreferSameLine", "SameLineWhere"
chain_width = 60
color = "Auto" #"Auto", "Always", "Never"
combine_control_expr = true # true, false
comment_width = 80
condense_wildcard_suffixes = false # true, false
control_brace_style = "AlwaysSameLine" # "AlwaysNextLine", "AlwaysSameLine", "ClosingNextLine"
disable_all_formatting = false # true, false
edition = "2021" # "2015", "2018", "2021"
empty_item_single_line = true # true, false
enum_discrim_align_threshold = 0
error_on_line_overflow = false # true, false
error_on_unformatted = false # true, false
fn_args_layout = "Tall" # "Compressed", "Tall", "Vertical"
fn_call_width = 60
fn_single_line = false # true, false
force_explicit_abi = true # true, false
force_multiline_blocks = false # true, false
format_code_in_doc_comments = false # true, false
format_generated_files = false # true, false
format_macro_matchers = false # true, false
format_macro_bodies = true # true, false
format_strings = false # true, false
group_imports = "Preserve" # "Preserve", "StdExternalCrate"
hard_tabs = false # true, false
hex_literal_case = "Preserve" # "Upper", "Lower"
hide_parse_errors = false # true, false
ignore = []
imports_indent = "Block" # "Block", "Visual"
imports_layout = "Mixed" # "Horizontal", "HorizontalVertical", "Mixed", "Vertical"
indent_style = "Block" # "Block", "Visual"
inline_attribute_width = 0
license_template_path = ""
match_arm_blocks = true # true, false
match_arm_leading_pipes = "Never" # "Always", "Never", "Preserve"
match_block_trailing_comma = false # true, false
max_width = 100
merge_derives = true # true, false
imports_granularity = "Preserve" # "Preserve", "Crate", "Module", "Item", "One"
merge_imports = false # true, false
newline_style = "Auto" # "Auto", "Native", "Unix", "Windows"
normalize_comments = false # true, false
normalize_doc_attributes = false # true, false
overflow_delimited_expr = false # true, false
remove_nested_parens = true # true, false
reorder_impl_items = false # true, false
reorder_imports = true # true, false
reorder_modules = true # true, false
report_fixme = "Never" # "Always", "Unnumbered", "Never"
report_todo = "Never" # "Always", "Unnumbered", "Never"
skip_children = false # true, false
single_line_if_else_max_width = 50
space_after_colon = true # true, false
space_before_colon = false # true, false
spaces_around_ranges = false # true, false
struct_field_align_threshold = 0
struct_lit_single_line = true # true, false
struct_lit_width = 18
struct_variant_width = 35
tab_spaces = 2
trailing_comma = "Vertical" # "Always", "Never", "Vertical"
trailing_semicolon = true # true, false
type_punctuation_density = "Wide" # "Compressed", "Wide"
unstable_features = false # true, false
use_field_init_shorthand = false # true, false
use_small_heuristics = "Default" # "Default", "Off", "Max"
use_try_shorthand = false # true, false
where_single_line = false # true, false
wrap_comments = false # true, false

55
.vscode/launch.json vendored Normal file
View File

@ -0,0 +1,55 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"type": "lldb",
"request": "launch",
"name": "Debug executable 'beteran-server-service'",
"cargo": {
"args": [
"build",
"--bin=beteran-server-service",
"--package=beteran-server-service"
],
"filter": {
"name": "beteran-server-service",
"kind": "bin"
}
},
"env": {
"URL_DATABASE": "postgresql://beteran:qwer5795QWER@192.168.50.200:25432/beteran",
"URL_BROKER": "nats://192.168.50.200:4222",
"QUEUE_BROKER": "bet.beteran",
},
"args": [],
"cwd": "${workspaceFolder}"
},
{
"type": "lldb",
"request": "launch",
"name": "Debug unit tests in executable 'beteran-server-service'",
"cargo": {
"args": [
"test",
"--no-run",
"--bin=beteran-server-service",
"--package=beteran-server-service"
],
"filter": {
"name": "beteran-server-service",
"kind": "bin"
}
},
"env": {
"URL_DATABASE": "postgresql://beteran:qwer5795QWER@192.168.50.200:25432/beteran",
"URL_BROKER": "nats://192.168.50.200:4222",
"QUEUE_BROKER": "bet.beteran",
},
"args": [],
"cwd": "${workspaceFolder}"
}
]
}

34
Cargo.toml Normal file
View File

@ -0,0 +1,34 @@
[package]
name = "beteran-server-service"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[[bin]]
name = "beteran-server-service"
path = "./src/main.rs"
[dependencies]
captcha = { version = "0" }
chrono = { version = "0" }
diesel = { version = "1", features = ["chrono", "r2d2", "uuidv07", "postgres"] }
diesel_migrations = { version = "1" }
diesel-derive-enum = { version = "1", features = ["postgres"] }
futures = { version = "0", default-features = false, features = [
"async-await",
] }
nats = { version = "0" }
prost = { version = "0" }
rust-argon2 = { version = "1" }
serde = { version = "1", features = ["derive"] }
serde_json = { version = "1" }
tokio = { version = "1", features = ["macros", "rt-multi-thread"] }
tokio-cron-scheduler = { version = "0" }
uuid = { version = "0", features = ["serde", "v4", "v5"] }
beteran-protobuf-rust = { git = "https://gitlab.loafle.net/bet/beteran-protobuf-rust.git", tag = "v0.1.21-snapshot" }
beteran-common-rust = { git = "https://gitlab.loafle.net/bet/beteran-common-rust.git", tag = "v0.1.1-snapshot" }
[build-dependencies]

View File

@ -0,0 +1,4 @@
DROP EXTENSION "uuid-ossp";
DROP FUNCTION update_updated_at_column;
DROP FUNCTION update_state_changed_at_column;
DROP FUNCTION update_active_changed_at_column;

View File

@ -0,0 +1,36 @@
CREATE EXTENSION IF NOT EXISTS "uuid-ossp";
CREATE OR REPLACE FUNCTION update_updated_at_column()
RETURNS TRIGGER AS $$
BEGIN
NEW.updated_at = (extract(epoch from now()) * 1000);
RETURN NEW;
END;
$$ language 'plpgsql';
CREATE OR REPLACE FUNCTION update_state_changed_at_column()
RETURNS TRIGGER AS $$
BEGIN
IF OLD.state != NEW.state THEN
NEW.state_changed_at = (extract(epoch from now()) * 1000);
ELSE
NEW.state_changed_at = NEW.state_changed_at;
END IF;
RETURN NEW;
END;
$$ language 'plpgsql';
CREATE OR REPLACE FUNCTION update_active_changed_at_column()
RETURNS TRIGGER AS $$
BEGIN
IF OLD.active != NEW.active THEN
NEW.active_changed_at = (extract(epoch from now()) * 1000);
ELSE
NEW.active_changed_at = NEW.active_changed_at;
END IF;
RETURN NEW;
END;
$$ language 'plpgsql';

View File

@ -0,0 +1,4 @@
-- Down migration for member_sites.
-- PostgreSQL has no "DROP UNIQUE INDEX"; a unique index is dropped with DROP INDEX.
DROP INDEX uidx_member_sites_url;
-- DROP TRIGGER requires naming the table the trigger is attached to.
DROP TRIGGER tg_member_sites_updated_at ON member_sites;
DROP TABLE member_sites;

View File

@ -0,0 +1,18 @@
-- member_sites: sites a member account belongs to.
-- All *_at columns are epoch milliseconds (extract(epoch ...) * 1000).
CREATE TABLE IF NOT EXISTS member_sites (
id UUID DEFAULT uuid_generate_v4(),
url TEXT NOT NULL,
created_at BIGINT NOT NULL DEFAULT (extract(epoch from now()) * 1000),
updated_at BIGINT NOT NULL DEFAULT (extract(epoch from now()) * 1000),
deleted_at BIGINT,
PRIMARY KEY (id),
UNIQUE (url)
);
-- NOTE(review): UNIQUE (url) above already creates a unique index on url,
-- so this second unique index is redundant — confirm which one is wanted.
CREATE UNIQUE INDEX uidx_member_sites_url ON member_sites (url);
-- trigger (updated_at)
CREATE TRIGGER tg_member_sites_updated_at
BEFORE UPDATE
ON member_sites
FOR EACH ROW
EXECUTE PROCEDURE update_updated_at_column();

View File

@ -0,0 +1,3 @@
-- Down migration for member_levels.
-- DROP TRIGGER requires naming the table the trigger is attached to.
DROP TRIGGER tg_member_levels_updated_at ON member_levels;
DROP TABLE member_levels;

View File

@ -0,0 +1,17 @@
CREATE TABLE IF NOT EXISTS member_levels (
id UUID DEFAULT uuid_generate_v4(),
name TEXT NOT NULL,
sort_order SMALLINT NOT NULL,
created_at BIGINT NOT NULL DEFAULT (extract(epoch from now()) * 1000),
updated_at BIGINT NOT NULL DEFAULT (extract(epoch from now()) * 1000),
deleted_at BIGINT,
PRIMARY KEY (id),
UNIQUE (name)
);
-- trigger (updated_at)
CREATE TRIGGER tg_member_levels_updated_at
BEFORE UPDATE
ON member_levels
FOR EACH ROW
EXECUTE PROCEDURE update_updated_at_column();

View File

@ -0,0 +1,3 @@
-- Down migration for member_classes.
-- DROP TRIGGER requires naming the table the trigger is attached to.
DROP TRIGGER tg_member_classes_updated_at ON member_classes;
DROP TABLE member_classes;

View File

@ -0,0 +1,17 @@
CREATE TABLE IF NOT EXISTS member_classes (
id UUID DEFAULT uuid_generate_v4(),
name TEXT NOT NULL,
parent_id UUID,
created_at BIGINT NOT NULL DEFAULT (extract(epoch from now()) * 1000),
updated_at BIGINT NOT NULL DEFAULT (extract(epoch from now()) * 1000),
deleted_at BIGINT,
PRIMARY KEY (id),
UNIQUE (name)
);
-- trigger (updated_at)
CREATE TRIGGER tg_member_classes_updated_at
BEFORE UPDATE
ON member_classes
FOR EACH ROW
EXECUTE PROCEDURE update_updated_at_column();

View File

@ -0,0 +1,5 @@
-- Down migration for members.
DROP INDEX idx_members_username;
-- DROP TRIGGER requires naming the table the trigger is attached to.
DROP TRIGGER tg_members_state_changed_at ON members;
DROP TRIGGER tg_members_updated_at ON members;
DROP TABLE members;
-- The up migration also creates the member_state enum; drop it last,
-- after the column that uses it is gone, so the migration is re-runnable.
DROP TYPE member_state;

View File

@ -0,0 +1,53 @@
CREATE TYPE member_state AS ENUM ('normal', 'pending', 'withdrawal', 'dormancy', 'blacklist', 'suspended');
CREATE TABLE IF NOT EXISTS members (
id UUID DEFAULT uuid_generate_v4(),
member_site_id UUID NOT NULL,
member_class_id UUID NOT NULL,
member_level_id UUID NOT NULL,
username TEXT NOT NULL,
password TEXT NOT NULL,
nickname TEXT NOT NULL,
mobile_phone_number TEXT,
state MEMBER_STATE DEFAULT 'normal',
state_changed_at BIGINT,
last_signined_ip TEXT,
last_signined_at BIGINT,
referrer_member_id UUID,
referred_count BIGINT NOT NULL,
created_at BIGINT NOT NULL DEFAULT (extract(epoch from now()) * 1000),
updated_at BIGINT NOT NULL DEFAULT (extract(epoch from now()) * 1000),
deleted_at BIGINT,
PRIMARY KEY (id),
UNIQUE (username),
CONSTRAINT fk_members_member_site_id
FOREIGN KEY(member_site_id)
REFERENCES member_sites(id),
CONSTRAINT fk_members_member_class_id
FOREIGN KEY(member_class_id)
REFERENCES member_classes(id),
CONSTRAINT fk_members_member_level_id
FOREIGN KEY(member_level_id)
REFERENCES member_levels(id),
CONSTRAINT fk_members_referrer_member_id
FOREIGN KEY(referrer_member_id)
REFERENCES members(id)
);
CREATE INDEX idx_members_username ON members (username);
-- trigger (updated_at)
CREATE TRIGGER tg_members_updated_at
BEFORE UPDATE
ON members
FOR EACH ROW
EXECUTE PROCEDURE update_updated_at_column();
-- trigger (state_changed_at)
CREATE TRIGGER tg_members_state_changed_at
BEFORE UPDATE
ON members
FOR EACH ROW
EXECUTE PROCEDURE update_state_changed_at_column();

View File

@ -0,0 +1,3 @@
-- Down migration for captchas.
-- PostgreSQL has no "DROP UNIQUE INDEX"; a unique index is dropped with DROP INDEX.
DROP INDEX uidx_captchas_token;
DROP TABLE captchas;

View File

@ -0,0 +1,12 @@
CREATE TABLE IF NOT EXISTS captchas (
id UUID DEFAULT uuid_generate_v4(),
token TEXT NOT NULL,
security_code TEXT NOT NULL,
expires_at BIGINT NOT NULL DEFAULT (extract(epoch from now()) * 1000),
created_at BIGINT NOT NULL DEFAULT (extract(epoch from now()) * 1000),
PRIMARY KEY (id)
);
-- index
CREATE UNIQUE INDEX uidx_captchas_token ON captchas (token);

View File

@ -0,0 +1 @@
DROP TABLE member_sessions;

View File

@ -0,0 +1,14 @@
CREATE TABLE IF NOT EXISTS member_sessions (
id UUID DEFAULT uuid_generate_v4(),
member_id UUID NOT NULL,
ip TEXT,
last_accessed_at BIGINT NOT NULL DEFAULT (extract(epoch from now()) * 1000),
expires_at BIGINT NOT NULL DEFAULT (extract(epoch from now()) * 1000),
created_at BIGINT NOT NULL DEFAULT (extract(epoch from now()) * 1000),
PRIMARY KEY (id),
CONSTRAINT fk_member_sessions_member_id
FOREIGN KEY(member_id)
REFERENCES members(id)
);

0
src/compositions/mod.rs Normal file
View File

60
src/main.rs Normal file
View File

@ -0,0 +1,60 @@
//!
//!
#[macro_use]
extern crate diesel;
#[macro_use]
extern crate diesel_migrations;
use diesel::{
r2d2::{ConnectionManager, Pool},
PgConnection,
};
use std::env;
mod compositions;
mod repositories;
mod services;
diesel_migrations::embed_migrations!();
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
// NATS endpoint. NOTE(review): a missing variable silently becomes "",
// which only surfaces later as a connect error — consider failing fast.
let url_server_broker = match env::var_os("URL_BROKER") {
Some(v) => v.into_string().unwrap(),
None => "".to_string(),
};
// Queue group name shared by subscribers on the broker.
let queue_server_broker = match env::var_os("QUEUE_BROKER") {
Some(v) => v.into_string().unwrap(),
None => "".to_string(),
};
// PostgreSQL connection string for the r2d2 pool.
let url_db = match env::var_os("URL_DATABASE") {
Some(v) => v.into_string().unwrap(),
None => "".to_string(),
};
let manager = ConnectionManager::<PgConnection>::new(url_db);
// Small fixed-size pool; test_on_check_out validates connections before use.
let pool = Pool::builder()
.max_size(4)
.test_on_check_out(true)
.build(manager)?;
// Run embedded diesel migrations once at startup.
let conn = pool.get()?;
embedded_migrations::run(&conn)?;
let server_broker_opts = nats::asynk::Options::new();
let connection_server_broker = server_broker_opts.connect(url_server_broker).await?;
// The identity service owns clones of the broker connection and DB pool.
let identity_service = services::identity::service::Service::new(
connection_server_broker.clone(),
queue_server_broker.clone(),
pool.clone(),
);
println!("Server service [beteran-server-service] is started");
// Blocks until a subscription task fails; add further services to this join.
futures::try_join!(identity_service.subscribe(),)?;
Ok(())
}

View File

@ -0,0 +1,9 @@
//! Module wiring for one repository: data models, the repository
//! implementation, and the diesel schema.
/// Row structs and query-parameter types.
pub mod models;
/// Database access built on diesel.
pub mod repository;
/// diesel `table!` definitions.
pub mod schema;

View File

@ -0,0 +1,37 @@
use super::schema::captchas;
use beteran_common_rust as bcr;
/// A row of the `captchas` table (see the captchas migration).
#[derive(Eq, Hash, Identifiable, Queryable, PartialEq, Debug, Clone)]
#[table_name = "captchas"]
pub struct Captcha {
/// Primary key (DB-generated via uuid_generate_v4()).
pub id: uuid::Uuid,
/// Expected answer for the captcha challenge.
pub security_code: String,
/// Expiry instant, epoch milliseconds.
pub expires_at: i64,
/// Creation instant, epoch milliseconds.
pub created_at: i64,
}
/// Insertable payload for the `captchas` table.
///
/// NOTE(review): the captchas migration declares a NOT NULL `token` column
/// with no default, but neither this struct nor `schema.rs` maps it — an
/// insert would fail at runtime; confirm against the deployed schema.
#[derive(Insertable, Debug, Clone)]
#[table_name = "captchas"]
pub struct NewCaptcha {
/// Expected answer for the captcha challenge.
pub security_code: String,
/// Expiry instant, epoch milliseconds.
pub expires_at: i64,
}
/// Filter/paging/sorting parameters for captcha list queries.
#[derive(Debug, Clone)]
pub struct FindAll {
/// When set, only rows expiring strictly before this instant (epoch ms).
pub expires_at: Option<i64>,
/// Optional page/page_size; when absent, all rows are returned.
pub pagination: Option<bcr::models::pagination::Pagination>,
/// Sort keys, applied in order ("expires_at", "created_at").
pub sorts: Option<Vec<bcr::models::pagination::Sort>>,
}

View File

@ -0,0 +1,135 @@
//!
//!
use super::{models, schema::captchas};
use beteran_common_rust as bcr;
use diesel::prelude::*;
use diesel::result::Error;
///
pub struct Repository {}
impl std::fmt::Debug for Repository {
// Manual Debug because the struct is a stateless marker type.
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
f.debug_struct("Repository of captchas").finish()
}
}
impl Default for Repository {
// Delegate to `new()` so `default()` and `new()` can never diverge.
fn default() -> Self {
Self::new()
}
}
impl Repository {
///
pub fn new() -> Repository {
Repository {}
}
///
pub fn insert(
&self,
conn: &diesel::PgConnection,
new_captcha: &models::NewCaptcha,
) -> Result<models::Captcha, Error> {
let captcha = diesel::insert_into(captchas::table)
.values(new_captcha)
.get_result::<models::Captcha>(conn)?;
Ok(captcha)
}
///
pub fn select(
&self,
conn: &diesel::PgConnection,
id: uuid::Uuid,
) -> Result<Option<models::Captcha>, Error> {
match captchas::table.find(id).first::<models::Captcha>(conn) {
Ok(m) => Ok(Some(m)),
Err(e) => match e {
diesel::result::Error::NotFound => Ok(None),
_ => Err(e),
},
}
}
///
pub fn select_all_count(
&self,
conn: &diesel::PgConnection,
find_all: models::FindAll,
) -> Result<i64, Error> {
use captchas::dsl;
let mut q = captchas::table.into_boxed();
if let Some(sp) = find_all.expires_at {
q = q.filter(dsl::expires_at.lt(sp));
}
q.count().get_result(conn)
}
///
pub fn select_all(
&self,
conn: &diesel::PgConnection,
find_all: models::FindAll,
) -> Result<Vec<models::Captcha>, Error> {
use captchas::dsl;
let mut q = captchas::table.into_boxed();
if let Some(sp) = find_all.expires_at {
q = q.filter(dsl::expires_at.lt(sp));
}
if let Some(p) = find_all.pagination {
let page = p.page.unwrap_or(1);
if let Some(page_size) = p.page_size {
q = q.offset(((page - 1) * page_size) as i64);
q = q.limit(page_size as i64);
}
}
if let Some(orderbys) = find_all.sorts {
for s in orderbys {
match s {
bcr::models::pagination::Sort::ASC(property) => match property.as_str() {
"expires_at" => {
q = q.order_by(dsl::expires_at.asc());
}
"created_at" => {
q = q.order_by(dsl::created_at.asc());
}
_ => {}
},
bcr::models::pagination::Sort::DESC(property) => match property.as_str() {
"expires_at" => {
q = q.order_by(dsl::expires_at.desc());
}
"created_at" => {
q = q.order_by(dsl::created_at.desc());
}
_ => {}
},
};
}
}
q.load::<models::Captcha>(conn)
}
///
pub fn delete_expired(&self, conn: &diesel::PgConnection) -> Result<u64, Error> {
use captchas::dsl;
let now = chrono::Utc::now().timestamp();
diesel::delete(dsl::captchas.filter(dsl::expires_at.le(now)))
.execute(conn)
.map(|c| c as u64)
}
}

View File

@ -0,0 +1,16 @@
//!
//!
table! {
/// Short-lived captcha challenges.
/// NOTE(review): the captchas migration also declares a NOT NULL `token`
/// column with no default; it is not mapped here (nor in the models), so
/// inserts will fail at runtime until the two agree — confirm.
captchas(id) {
/// Primary key.
id -> Uuid,
/// Expected answer for the challenge.
security_code -> Text,
/// Expiry instant, epoch milliseconds.
expires_at -> BigInt,
/// Creation instant, epoch milliseconds.
created_at -> BigInt,
}
}

View File

@ -0,0 +1,9 @@
//! Module wiring for one repository: data models, the repository
//! implementation, and the diesel schema.
/// Row structs and query-parameter types.
pub mod models;
/// Database access built on diesel.
pub mod repository;
/// diesel `table!` definitions.
pub mod schema;

View File

@ -0,0 +1,125 @@
use super::schema::{members, MemberState};
use beteran_common_rust as bcr;
/// A row of the `members` table.
///
/// NOTE: field order must match the column order in `super::schema::members`
/// — diesel's `Queryable` maps columns to fields by position, not by name.
#[derive(Eq, Hash, PartialEq, Debug, Clone, Identifiable, Queryable)]
#[table_name = "members"]
pub struct Member {
/// Primary key (DB-generated via uuid_generate_v4()).
pub id: uuid::Uuid,
/// FK -> member_sites.id.
pub member_site_id: uuid::Uuid,
/// FK -> member_classes.id.
pub member_class_id: uuid::Uuid,
/// FK -> member_levels.id.
pub member_level_id: uuid::Uuid,
/// Unique login name.
pub username: String,
/// Stored password value.
pub password: String,
/// Display name.
pub nickname: String,
///
pub mobile_phone_number: Option<String>,
/// Lifecycle state; defaults to 'normal' in the DB.
pub state: MemberState,
/// Set by trigger when `state` changes (epoch ms).
pub state_changed_at: Option<i64>,
/// Self-referencing FK -> members.id of the referrer, if any.
pub referrer_member_id: Option<uuid::Uuid>,
/// Number of members referred by this member.
pub referred_count: i64,
/// IP recorded on last sign-in.
pub last_signined_ip: Option<String>,
/// Instant of last sign-in (epoch ms).
pub last_signined_at: Option<i64>,
/// Creation instant, epoch milliseconds.
pub created_at: i64,
/// Last-update instant, maintained by trigger (epoch ms).
pub updated_at: i64,
/// Soft-delete marker (epoch ms) — NULL means live.
pub deleted_at: Option<i64>,
}
/// Insertable payload for the `members` table; remaining columns take their
/// DB defaults (id, state, timestamps).
#[derive(Insertable, Debug, Clone)]
#[table_name = "members"]
pub struct NewMember {
/// FK -> member_sites.id.
pub member_site_id: uuid::Uuid,
/// FK -> member_classes.id.
pub member_class_id: uuid::Uuid,
/// FK -> member_levels.id.
pub member_level_id: uuid::Uuid,
/// Optional FK -> members.id of the referrer.
pub referrer_member_id: Option<uuid::Uuid>,
/// Unique login name.
pub username: String,
/// Stored password value.
pub password: String,
/// Display name.
pub nickname: String,
///
pub mobile_phone_number: Option<String>,
}
/// Partial update for a member; `None` fields are left unchanged
/// (diesel `AsChangeset` skips them).
#[derive(AsChangeset, Debug, Clone)]
#[table_name = "members"]
pub struct ModifyMember {
/// New site, when set.
pub member_site_id: Option<uuid::Uuid>,
/// New level, when set.
pub member_level_id: Option<uuid::Uuid>,
/// New password value, when set.
pub password: Option<String>,
///
pub mobile_phone_number: Option<String>,
/// New lifecycle state, when set.
pub state: Option<MemberState>,
}
/// Changeset applied on sign-in.
///
/// NOTE(review): only the IP is updated — `last_signined_at` is never written
/// anywhere visible; confirm the timestamp should be set here too.
#[derive(AsChangeset, Debug, Clone)]
#[table_name = "members"]
pub struct ModifyMember4LastSignined {
/// IP address of the sign-in.
pub last_signined_ip: String,
}
/// Changeset for the soft-delete marker: `Some(ms)` marks deleted,
/// `None` leaves the column unchanged (AsChangeset skips None).
#[derive(AsChangeset, Debug, Clone)]
#[table_name = "members"]
pub struct ModifyMember4DeletedAt {
/// Deletion instant, epoch milliseconds.
pub deleted_at: Option<i64>,
}
/// Filter/paging/sorting parameters for member list queries.
/// All `*_like` fields are SQL LIKE patterns (caller supplies wildcards).
#[derive(Debug, Clone)]
pub struct FindAll {
/// Restrict to one site.
pub member_site_id: Option<uuid::Uuid>,
/// Restrict to one class.
pub member_class_id: Option<uuid::Uuid>,
/// Restrict to one level.
pub member_level_id: Option<uuid::Uuid>,
/// Restrict to members referred by this member.
pub referrer_member_id: Option<uuid::Uuid>,
/// LIKE pattern on username.
pub username_like: Option<String>,
/// LIKE pattern on nickname.
pub nickname_like: Option<String>,
/// LIKE pattern on mobile phone number.
pub mobile_phone_number_like: Option<String>,
/// Exact match on last sign-in IP.
pub last_signined_ip: Option<String>,
/// Exact match on lifecycle state.
pub state: Option<MemberState>,
/// Presence selects soft-deleted rows; absence selects live rows.
/// NOTE(review): the bool value itself is ignored by the repository.
pub deleted_at: Option<bool>,
/// Optional page/page_size; when absent, all rows are returned.
pub pagination: Option<bcr::models::pagination::Pagination>,
/// Sort keys, applied in order.
pub sorts: Option<Vec<bcr::models::pagination::Sort>>,
}

View File

@ -0,0 +1,317 @@
//!
//!
use super::{models, schema::members};
use beteran_common_rust as bcr;
use diesel::prelude::*;
use diesel::result::Error;
///
pub struct Repository {}
impl std::fmt::Debug for Repository {
// Manual Debug because the struct is a stateless marker type.
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
f.debug_struct("Repository of members").finish()
}
}
impl Default for Repository {
// Delegate to `new()` so `default()` and `new()` can never diverge.
fn default() -> Self {
Self::new()
}
}
impl Repository {
///
pub fn new() -> Repository {
Repository {}
}
///
pub fn insert(
&self,
conn: &diesel::PgConnection,
new_member: &models::NewMember,
) -> Result<models::Member, Error> {
let inserted = diesel::insert_into(members::table)
.values(new_member)
.get_result::<models::Member>(conn)?;
Ok(inserted)
}
///
pub fn select(
&self,
conn: &diesel::PgConnection,
id: uuid::Uuid,
) -> Result<Option<models::Member>, Error> {
match members::table.find(id).first::<models::Member>(conn) {
Ok(m) => Ok(Some(m)),
Err(e) => match e {
diesel::result::Error::NotFound => Ok(None),
_ => Err(e),
},
}
}
///
pub fn select_by_username(
&self,
conn: &diesel::PgConnection,
username: &str,
) -> Result<Option<models::Member>, Error> {
use members::dsl;
match members::table
.filter(dsl::username.eq(username))
.first::<models::Member>(conn)
{
Ok(m) => Ok(Some(m)),
Err(e) => match e {
diesel::result::Error::NotFound => Ok(None),
_ => Err(e),
},
}
}
///
pub fn select_by_nickname(
&self,
conn: &diesel::PgConnection,
nickname: &str,
) -> Result<Option<models::Member>, Error> {
use members::dsl;
match members::table
.filter(dsl::nickname.eq(nickname))
.first::<models::Member>(conn)
{
Ok(m) => Ok(Some(m)),
Err(e) => match e {
diesel::result::Error::NotFound => Ok(None),
_ => Err(e),
},
}
}
///
pub fn select_all_count(
&self,
conn: &diesel::PgConnection,
find_all: models::FindAll,
) -> Result<i64, Error> {
use members::dsl;
let mut q = members::table.into_boxed();
if let Some(sp) = find_all.member_site_id {
q = q.filter(dsl::member_site_id.eq(sp));
}
if let Some(sp) = find_all.member_class_id {
q = q.filter(dsl::member_class_id.eq(sp));
}
if let Some(sp) = find_all.member_level_id {
q = q.filter(dsl::member_level_id.eq(sp));
}
if let Some(sp) = find_all.username_like {
q = q.filter(dsl::username.like(sp));
}
if let Some(sp) = find_all.nickname_like {
q = q.filter(dsl::nickname.like(sp));
}
if let Some(sp) = find_all.mobile_phone_number_like {
q = q.filter(dsl::mobile_phone_number.like(sp));
}
if let Some(sp) = find_all.last_signined_ip {
q = q.filter(dsl::last_signined_ip.eq(sp));
}
if let Some(sp) = find_all.state {
q = q.filter(dsl::state.eq(sp));
}
if find_all.deleted_at.is_some() {
q = q.filter(dsl::deleted_at.is_not_null());
} else {
q = q.filter(dsl::deleted_at.is_null());
}
q.count().get_result(conn)
}
///
pub fn select_all(
&self,
conn: &diesel::PgConnection,
find_all: models::FindAll,
) -> Result<Vec<models::Member>, Error> {
use members::dsl;
let mut q = members::table.into_boxed();
if let Some(sp) = find_all.member_site_id {
q = q.filter(dsl::member_site_id.eq(sp));
}
if let Some(sp) = find_all.member_class_id {
q = q.filter(dsl::member_class_id.eq(sp));
}
if let Some(sp) = find_all.member_level_id {
q = q.filter(dsl::member_level_id.eq(sp));
}
if let Some(sp) = find_all.username_like {
q = q.filter(dsl::username.like(sp));
}
if let Some(sp) = find_all.nickname_like {
q = q.filter(dsl::nickname.like(sp));
}
if let Some(sp) = find_all.mobile_phone_number_like {
q = q.filter(dsl::mobile_phone_number.like(sp));
}
if let Some(sp) = find_all.last_signined_ip {
q = q.filter(dsl::last_signined_ip.eq(sp));
}
if let Some(sp) = find_all.state {
q = q.filter(dsl::state.eq(sp));
}
if find_all.deleted_at.is_some() {
q = q.filter(dsl::deleted_at.is_not_null());
} else {
q = q.filter(dsl::deleted_at.is_null());
}
if let Some(p) = find_all.pagination {
let page = p.page.unwrap_or(1);
if let Some(page_size) = p.page_size {
q = q.offset(((page - 1) * page_size) as i64);
q = q.limit(page_size as i64);
}
}
if let Some(orderbys) = find_all.sorts {
for s in orderbys {
match s {
bcr::models::pagination::Sort::ASC(property) => match property.as_str() {
"username" => {
q = q.order_by(dsl::username.asc());
}
"nickname" => {
q = q.order_by(dsl::nickname.asc());
}
"mobile_phone_number" => {
q = q.order_by(dsl::mobile_phone_number.asc());
}
"last_signined_ip" => {
q = q.order_by(dsl::last_signined_ip.asc());
}
"state" => {
q = q.order_by(dsl::state.asc());
}
"state_changed_at" => {
q = q.order_by(dsl::state_changed_at.asc());
}
"created_at" => {
q = q.order_by(dsl::created_at.asc());
}
"updated_at" => {
q = q.order_by(dsl::updated_at.asc());
}
"deleted_at" => {
q = q.order_by(dsl::deleted_at.asc());
}
_ => {}
},
bcr::models::pagination::Sort::DESC(property) => match property.as_str() {
"username" => {
q = q.order_by(dsl::username.desc());
}
"nickname" => {
q = q.order_by(dsl::nickname.desc());
}
"mobile_phone_number" => {
q = q.order_by(dsl::mobile_phone_number.desc());
}
"last_signined_ip" => {
q = q.order_by(dsl::last_signined_ip.desc());
}
"state" => {
q = q.order_by(dsl::state.desc());
}
"state_changed_at" => {
q = q.order_by(dsl::state_changed_at.desc());
}
"created_at" => {
q = q.order_by(dsl::created_at.desc());
}
"updated_at" => {
q = q.order_by(dsl::updated_at.desc());
}
"deleted_at" => {
q = q.order_by(dsl::deleted_at.desc());
}
_ => {}
},
};
}
}
q.load::<models::Member>(conn)
}
///
pub fn update(
&self,
conn: &diesel::PgConnection,
id: uuid::Uuid,
modify: &models::ModifyMember,
) -> Result<u64, Error> {
use members::dsl;
diesel::update(dsl::members.filter(dsl::id.eq(id)))
.set(modify)
.execute(conn)
.map(|c| c as u64)
}
///
pub fn update_increase_referred_count(
&self,
conn: &diesel::PgConnection,
id: uuid::Uuid,
) -> Result<u64, Error> {
use members::dsl;
diesel::update(dsl::members.filter(dsl::id.eq(id)))
.set(dsl::referred_count.eq(dsl::referred_count + 1))
.execute(conn)
.map(|c| c as u64)
}
///
pub fn update_last_signined_ip(
&self,
conn: &diesel::PgConnection,
id: uuid::Uuid,
modify: &models::ModifyMember4LastSignined,
) -> Result<u64, Error> {
use members::dsl;
diesel::update(dsl::members.filter(dsl::id.eq(id)))
.set(modify)
.execute(conn)
.map(|c| c as u64)
}
///
pub fn update_deleted_at(
&self,
conn: &diesel::PgConnection,
id: uuid::Uuid,
modify: &models::ModifyMember4DeletedAt,
) -> Result<u64, Error> {
use members::dsl;
diesel::update(dsl::members.filter(dsl::id.eq(id)))
.set(modify)
.execute(conn)
.map(|c| c as u64)
}
}

View File

@ -0,0 +1,56 @@
//!
//!
/// Lifecycle state of a member; `diesel_derive_enum` maps it to the
/// Postgres enum `member_state` and generates `MemberStateMapping`
/// used by the `members` table! definition below.
#[derive(Eq, Hash, Debug, Clone, Copy, PartialEq, diesel_derive_enum::DbEnum)]
pub enum MemberState {
Normal,
Pending,
Withdrawal,
Dormancy,
Blacklist,
Suspended,
}
table! {
use diesel::sql_types::{Uuid, Text, BigInt, Nullable};
use super::MemberStateMapping;
/// Member accounts. Column order here drives the positional `Queryable`
/// mapping of `models::Member` — keep the two in sync.
members(id) {
/// Primary key.
id -> Uuid,
/// FK -> member_sites.id.
member_site_id -> Uuid,
/// FK -> member_classes.id.
member_class_id -> Uuid,
/// FK -> member_levels.id.
member_level_id -> Uuid,
/// Unique login name.
username -> Text,
/// Stored password value.
password -> Text,
/// Display name.
nickname -> Text,
///
mobile_phone_number -> Nullable<Text>,
/// Lifecycle state (Postgres enum member_state).
state -> MemberStateMapping,
/// Set by trigger when state changes (epoch ms).
state_changed_at -> Nullable<BigInt>,
/// Self-referencing FK -> members.id.
referrer_member_id -> Nullable<Uuid>,
/// Number of members referred by this member.
referred_count -> BigInt,
/// IP recorded on last sign-in.
last_signined_ip -> Nullable<Text>,
/// Instant of last sign-in (epoch ms).
last_signined_at -> Nullable<BigInt>,
/// Creation instant, epoch milliseconds.
created_at -> BigInt,
/// Last-update instant, maintained by trigger (epoch ms).
updated_at -> BigInt,
/// Soft-delete marker — NULL means live.
deleted_at -> Nullable<BigInt>,
}
}

View File

@ -0,0 +1,9 @@
//! Module wiring for one repository: data models, the repository
//! implementation, and the diesel schema.
/// Row structs and query-parameter types.
pub mod models;
/// Database access built on diesel.
pub mod repository;
/// diesel `table!` definitions.
pub mod schema;

View File

@ -0,0 +1,31 @@
use super::schema::member_classes;
use beteran_common_rust as bcr;
/// A row of the `member_classes` table (hierarchical via `parent_id`).
#[derive(Eq, Hash, Identifiable, Queryable, PartialEq, Debug, Clone)]
#[table_name = "member_classes"]
pub struct MemberClass {
/// Primary key (DB-generated via uuid_generate_v4()).
pub id: uuid::Uuid,
/// Optional parent class (self-reference); NULL for a root class.
pub parent_id: Option<uuid::Uuid>,
/// Unique class name.
pub name: String,
/// Creation instant, epoch milliseconds.
pub created_at: i64,
/// Last-update instant, maintained by trigger (epoch ms).
pub updated_at: i64,
/// Soft-delete marker — NULL means live.
pub deleted_at: Option<i64>,
}
/// Filter, pagination and sort options for listing member classes.
#[derive(Debug, Clone)]
pub struct FindAll {
    /// SQL `LIKE` pattern applied to `name` when present.
    pub name_like: Option<String>,
    /// Optional page / page-size settings.
    pub pagination: Option<bcr::models::pagination::Pagination>,
    /// Optional sort keys, applied in order.
    pub sorts: Option<Vec<bcr::models::pagination::Sort>>,
}

View File

@ -0,0 +1,114 @@
//!
//!
use super::{models, schema::member_classes};
use beteran_common_rust as bcr;
use diesel::prelude::*;
use diesel::result::Error;
/// Stateless data-access object for the `member_classes` table.
pub struct Repository {}
impl std::fmt::Debug for Repository {
    /// Writes a fixed, human-readable marker for this repository.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(f, "Repository of member_classes")
    }
}
impl Default for Repository {
    /// Equivalent to [`Repository::new`].
    fn default() -> Self {
        Self::new()
    }
}
impl Repository {
    /// Creates a new repository for `member_classes`.
    pub fn new() -> Repository {
        Repository {}
    }
    /// Loads a single member class by primary key.
    ///
    /// # Errors
    /// Returns `diesel::result::Error::NotFound` when no row matches `id`.
    pub fn select(
        &self,
        conn: &diesel::PgConnection,
        id: uuid::Uuid,
    ) -> Result<models::MemberClass, Error> {
        // `find` takes the key as-is; the former `id as uuid::Uuid` cast was
        // invalid (`as` only converts primitive types) and is removed.
        member_classes::table
            .find(id)
            .first::<models::MemberClass>(conn)
    }
    /// Counts rows matching the optional `name_like` filter.
    pub fn select_all_count(
        &self,
        conn: &diesel::PgConnection,
        find_all: models::FindAll,
    ) -> Result<i64, Error> {
        let mut q = member_classes::table.into_boxed();
        if let Some(sp) = find_all.name_like {
            q = q.filter(member_classes::dsl::name.like(sp));
        }
        q.count().get_result(conn)
    }
    /// Lists member classes with optional filter, pagination and sorts.
    ///
    /// All requested sort keys are applied in order; unknown keys are ignored.
    pub fn select_all(
        &self,
        conn: &diesel::PgConnection,
        find_all: models::FindAll,
    ) -> Result<Vec<models::MemberClass>, Error> {
        let mut q = member_classes::table.into_boxed();
        if let Some(sp) = find_all.name_like {
            q = q.filter(member_classes::dsl::name.like(sp));
        }
        if let Some(p) = find_all.pagination {
            let page = p.page.unwrap_or(1);
            if let Some(page_size) = p.page_size {
                q = q.offset(((page - 1) * page_size) as i64);
                q = q.limit(page_size as i64);
            }
        }
        if let Some(orderbys) = find_all.sorts {
            // `then_order_by` appends each key; the previous `order_by` calls
            // replaced the ORDER BY clause, so only the LAST requested sort
            // was ever applied.
            for s in orderbys {
                match s {
                    bcr::models::pagination::Sort::ASC(property) => match property.as_str() {
                        "name" => q = q.then_order_by(member_classes::name.asc()),
                        "created_at" => q = q.then_order_by(member_classes::created_at.asc()),
                        "updated_at" => q = q.then_order_by(member_classes::updated_at.asc()),
                        "deleted_at" => q = q.then_order_by(member_classes::deleted_at.asc()),
                        _ => {}
                    },
                    bcr::models::pagination::Sort::DESC(property) => match property.as_str() {
                        "name" => q = q.then_order_by(member_classes::name.desc()),
                        "created_at" => q = q.then_order_by(member_classes::created_at.desc()),
                        "updated_at" => q = q.then_order_by(member_classes::updated_at.desc()),
                        "deleted_at" => q = q.then_order_by(member_classes::deleted_at.desc()),
                        _ => {}
                    },
                };
            }
        }
        q.load::<models::MemberClass>(conn)
    }
}

View File

@ -0,0 +1,20 @@
//!
//!
table! {
    /// Member classes table (hierarchical via `parent_id`).
    member_classes(id) {
        /// Primary key.
        id -> Uuid,
        /// Optional parent class — presumably self-referencing; TODO confirm.
        parent_id -> Nullable<Uuid>,
        /// Class name.
        name -> Text,
        /// Row creation time (epoch — presumably seconds).
        created_at -> BigInt,
        /// Row last-update time (epoch).
        updated_at -> BigInt,
        /// Soft-delete time; `NULL` while live.
        deleted_at -> Nullable<BigInt>,
    }
}

View File

@ -0,0 +1,9 @@
//! Module wiring for this aggregate: data models, repository and
//! diesel schema.
/// Data models (rows, changesets, query options).
pub mod models;
/// Database repository (CRUD over the diesel schema).
pub mod repository;
/// Diesel `table!` schema.
pub mod schema;

View File

@ -0,0 +1,31 @@
use super::schema::member_levels;
use beteran_common_rust as bcr;
/// A row of the `member_levels` table.
#[derive(Eq, Hash, Identifiable, Queryable, PartialEq, Debug, Clone)]
#[table_name = "member_levels"]
pub struct MemberLevel {
    /// Primary key.
    pub id: uuid::Uuid,
    /// Level name.
    pub name: String,
    /// Ordering key used when sorting levels.
    pub sort_order: i16,
    /// Row creation time (epoch — presumably seconds).
    pub created_at: i64,
    /// Row last-update time (same unit as `created_at`).
    pub updated_at: i64,
    /// Soft-delete time; `None` while the row is live.
    pub deleted_at: Option<i64>,
}
/// Filter, pagination and sort options for listing member levels.
#[derive(Debug, Clone)]
pub struct FindAll {
    /// SQL `LIKE` pattern applied to `name` when present.
    pub name_like: Option<String>,
    /// Optional page / page-size settings.
    pub pagination: Option<bcr::models::pagination::Pagination>,
    /// Optional sort keys, applied in order.
    pub sorts: Option<Vec<bcr::models::pagination::Sort>>,
}

View File

@ -0,0 +1,120 @@
//!
//!
use super::{models, schema::member_levels};
use beteran_common_rust as bcr;
use diesel::prelude::*;
use diesel::result::Error;
/// Stateless data-access object for the `member_levels` table.
pub struct Repository {}
impl std::fmt::Debug for Repository {
    /// Writes a fixed, human-readable marker for this repository.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(f, "Repository of member_levels")
    }
}
impl Default for Repository {
    /// Equivalent to [`Repository::new`].
    fn default() -> Self {
        Self::new()
    }
}
impl Repository {
///
pub fn new() -> Repository {
Repository {}
}
///
pub fn select(
&self,
conn: &diesel::PgConnection,
id: uuid::Uuid,
) -> Result<models::MemberLevel, Error> {
member_levels::table
.find(id as uuid::Uuid)
.first::<models::MemberLevel>(conn)
}
///
pub fn select_all_count(
&self,
conn: &diesel::PgConnection,
find_all: models::FindAll,
) -> Result<i64, Error> {
let mut q = member_levels::table.into_boxed();
if let Some(sp) = find_all.name_like {
q = q.filter(member_levels::dsl::name.like(sp));
}
q.count().get_result(conn)
}
///
pub fn select_all(
&self,
conn: &diesel::PgConnection,
find_all: models::FindAll,
) -> Result<Vec<models::MemberLevel>, Error> {
let mut q = member_levels::table.into_boxed();
if let Some(sp) = find_all.name_like {
q = q.filter(member_levels::dsl::name.like(sp));
}
if let Some(p) = find_all.pagination {
let page = p.page.unwrap_or(1);
if let Some(page_size) = p.page_size {
q = q.offset(((page - 1) * page_size) as i64);
q = q.limit(page_size as i64);
}
}
if let Some(orderbys) = find_all.sorts {
for s in orderbys {
match s {
bcr::models::pagination::Sort::ASC(property) => match property.as_str() {
"name" => {
q = q.order_by(member_levels::name.asc());
}
"sort_order" => {
q = q.order_by(member_levels::sort_order.asc());
}
"created_at" => {
q = q.order_by(member_levels::created_at.asc());
}
"updated_at" => {
q = q.order_by(member_levels::updated_at.asc());
}
"deleted_at" => {
q = q.order_by(member_levels::deleted_at.asc());
}
_ => {}
},
bcr::models::pagination::Sort::DESC(property) => match property.as_str() {
"name" => {
q = q.order_by(member_levels::name.desc());
}
"sort_order" => {
q = q.order_by(member_levels::sort_order.desc());
}
"created_at" => {
q = q.order_by(member_levels::created_at.desc());
}
"updated_at" => {
q = q.order_by(member_levels::updated_at.desc());
}
"deleted_at" => {
q = q.order_by(member_levels::deleted_at.desc());
}
_ => {}
},
};
}
}
q.load::<models::MemberLevel>(conn)
}
}

View File

@ -0,0 +1,20 @@
//!
//!
table! {
    /// Member levels table.
    member_levels(id) {
        /// Primary key.
        id -> Uuid,
        /// Level name.
        name -> Text,
        /// Ordering key used when sorting levels.
        sort_order -> SmallInt,
        /// Row creation time (epoch — presumably seconds).
        created_at -> BigInt,
        /// Row last-update time (epoch).
        updated_at -> BigInt,
        /// Soft-delete time; `NULL` while live.
        deleted_at -> Nullable<BigInt>,
    }
}

View File

@ -0,0 +1,9 @@
//! Module wiring for this aggregate: data models, repository and
//! diesel schema.
/// Data models (rows, changesets, query options).
pub mod models;
/// Database repository (CRUD over the diesel schema).
pub mod repository;
/// Diesel `table!` schema.
pub mod schema;

View File

@ -0,0 +1,53 @@
use super::schema::member_sessions;
use beteran_common_rust as bcr;
/// A row of the `member_sessions` table: one authenticated sign-in session.
#[derive(Eq, Hash, Identifiable, Queryable, PartialEq, Debug, Clone)]
#[table_name = "member_sessions"]
pub struct MemberSession {
    /// Primary key; also used as the client-visible session id.
    pub id: uuid::Uuid,
    /// Owning member.
    pub member_id: uuid::Uuid,
    /// Client IP recorded at sign-in.
    pub ip: String,
    /// Last access time (epoch — presumably seconds).
    pub last_accessed_at: i64,
    /// Expiry time (epoch seconds; expired rows are purged by `delete_expired`).
    pub expires_at: i64,
    /// Row creation time (epoch).
    pub created_at: i64,
}
/// Insertable changeset for creating a session.
#[derive(Insertable, Debug, Clone)]
#[table_name = "member_sessions"]
pub struct NewMemberSession {
    /// Owning member.
    pub member_id: uuid::Uuid,
    /// Client IP recorded at sign-in.
    pub ip: String,
    /// Expiry time (epoch seconds).
    pub expires_at: i64,
}
/// Changeset for refreshing a session on access.
#[derive(AsChangeset, Debug, Clone)]
#[table_name = "member_sessions"]
pub struct ModifyMemberSessionForLastAccess {
    /// New last-access time (epoch seconds).
    pub last_accessed_at: i64,
    /// New expiry time (epoch seconds).
    pub expires_at: i64,
}
/// Filter, pagination and sort options for listing sessions.
#[derive(Debug, Clone)]
pub struct FindAll {
    /// Restrict to a single member when present.
    pub member_id: Option<uuid::Uuid>,
    /// Optional page / page-size settings.
    pub pagination: Option<bcr::models::pagination::Pagination>,
    /// Optional sort keys, applied in order.
    pub sorts: Option<Vec<bcr::models::pagination::Sort>>,
}

View File

@ -0,0 +1,180 @@
//!
//!
use super::{models, schema::member_sessions};
use beteran_common_rust as bcr;
use diesel::prelude::*;
use diesel::result::Error;
/// Stateless data-access object for the `member_sessions` table.
pub struct Repository {}
impl std::fmt::Debug for Repository {
    /// Writes a fixed, human-readable marker for this repository.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(f, "Repository of member_sessions")
    }
}
impl Default for Repository {
    /// Equivalent to [`Repository::new`].
    fn default() -> Self {
        Self::new()
    }
}
impl Repository {
///
pub fn new() -> Repository {
Repository {}
}
///
pub fn insert(
&self,
conn: &diesel::PgConnection,
new_session: &models::NewMemberSession,
) -> Result<models::MemberSession, Error> {
let captcha = diesel::insert_into(member_sessions::table)
.values(new_session)
.get_result::<models::MemberSession>(conn)?;
Ok(captcha)
}
///
pub fn select(
&self,
conn: &diesel::PgConnection,
id: uuid::Uuid,
) -> Result<Option<models::MemberSession>, Error> {
match member_sessions::table
.find(id as uuid::Uuid)
.first::<models::MemberSession>(conn)
{
Ok(m) => Ok(Some(m)),
Err(e) => match e {
diesel::result::Error::NotFound => Ok(None),
_ => Err(e),
},
}
}
///
pub fn select_by_member_id(
&self,
conn: &diesel::PgConnection,
member_id: uuid::Uuid,
) -> Result<Option<models::MemberSession>, Error> {
use member_sessions::dsl;
match member_sessions::table
.filter(dsl::member_id.eq(member_id))
.first::<models::MemberSession>(conn)
{
Ok(m) => Ok(Some(m)),
Err(e) => match e {
diesel::result::Error::NotFound => Ok(None),
_ => Err(e),
},
}
}
///
pub fn select_all_count(
&self,
conn: &diesel::PgConnection,
find_all: models::FindAll,
) -> Result<i64, Error> {
let mut q = member_sessions::table.into_boxed();
if let Some(sp) = find_all.member_id {
q = q.filter(member_sessions::dsl::member_id.eq(sp));
}
q.count().get_result(conn)
}
///
pub fn select_all(
&self,
conn: &diesel::PgConnection,
find_all: models::FindAll,
) -> Result<Vec<models::MemberSession>, Error> {
let mut q = member_sessions::table.into_boxed();
if let Some(sp) = find_all.member_id {
q = q.filter(member_sessions::dsl::member_id.eq(sp));
}
if let Some(p) = find_all.pagination {
let page = p.page.unwrap_or(1);
if let Some(page_size) = p.page_size {
q = q.offset(((page - 1) * page_size) as i64);
q = q.limit(page_size as i64);
}
}
if let Some(orderbys) = find_all.sorts {
for s in orderbys {
match s {
bcr::models::pagination::Sort::ASC(property) => match property.as_str() {
"member_id" => {
q = q.order_by(member_sessions::member_id.asc());
}
"last_accessed_at" => {
q = q.order_by(member_sessions::last_accessed_at.asc());
}
"expires_at" => {
q = q.order_by(member_sessions::expires_at.asc());
}
"created_at" => {
q = q.order_by(member_sessions::created_at.asc());
}
_ => {}
},
bcr::models::pagination::Sort::DESC(property) => match property.as_str() {
"member_id" => {
q = q.order_by(member_sessions::member_id.desc());
}
"last_accessed_at" => {
q = q.order_by(member_sessions::last_accessed_at.desc());
}
"expires_at" => {
q = q.order_by(member_sessions::expires_at.desc());
}
"created_at" => {
q = q.order_by(member_sessions::created_at.desc());
}
_ => {}
},
};
}
}
q.load::<models::MemberSession>(conn)
}
///
pub fn update_last_access(
&self,
conn: &diesel::PgConnection,
id: uuid::Uuid,
modify: &models::ModifyMemberSessionForLastAccess,
) -> Result<u64, Error> {
use member_sessions::dsl;
diesel::update(dsl::member_sessions.filter(dsl::id.eq(id)))
.set(modify)
.execute(conn)
.map(|c| c as u64)
}
///
pub fn delete_expired(&self, conn: &diesel::PgConnection) -> Result<u64, Error> {
use member_sessions::dsl;
let now = chrono::Utc::now().timestamp();
diesel::delete(dsl::member_sessions.filter(dsl::expires_at.le(now)))
.execute(conn)
.map(|c| c as u64)
}
}

View File

@ -0,0 +1,20 @@
//!
//!
table! {
    /// Member sessions table.
    member_sessions(id) {
        /// Primary key; also the client-visible session id.
        id -> Uuid,
        /// Owning member.
        member_id -> Uuid,
        /// Client IP recorded at sign-in.
        ip -> Text,
        /// Last access time (epoch — presumably seconds).
        last_accessed_at -> BigInt,
        /// Expiry time (epoch seconds).
        expires_at -> BigInt,
        /// Row creation time (epoch).
        created_at -> BigInt,
    }
}

View File

@ -0,0 +1,9 @@
//! Module wiring for this aggregate: data models, repository and
//! diesel schema.
/// Data models (rows, changesets, query options).
pub mod models;
/// Database repository (CRUD over the diesel schema).
pub mod repository;
/// Diesel `table!` schema.
pub mod schema;

View File

@ -0,0 +1,29 @@
use super::schema::member_sites;
use beteran_common_rust as bcr;
/// A row of the `member_sites` table: a site members can belong to.
#[derive(Eq, Hash, Identifiable, Queryable, PartialEq, Debug, Clone)]
#[table_name = "member_sites"]
pub struct MemberSite {
    /// Primary key (matched against `members.member_site_id` at sign-in).
    pub id: uuid::Uuid,
    /// Site URL; used as lookup key by `select_by_url`.
    pub url: String,
    /// Row creation time (epoch — presumably seconds).
    pub created_at: i64,
    /// Row last-update time (same unit as `created_at`).
    pub updated_at: i64,
    /// Soft-delete time; `None` while the row is live.
    pub deleted_at: Option<i64>,
}
/// Filter, pagination and sort options for listing member sites.
#[derive(Debug, Clone)]
pub struct FindAll {
    /// SQL `LIKE` pattern applied to `url` when present.
    pub url_like: Option<String>,
    /// Optional page / page-size settings.
    pub pagination: Option<bcr::models::pagination::Pagination>,
    /// Optional sort keys, applied in order.
    pub sorts: Option<Vec<bcr::models::pagination::Sort>>,
}

View File

@ -0,0 +1,134 @@
//!
//!
use super::{models, schema::member_sites};
use beteran_common_rust as bcr;
use diesel::prelude::*;
use diesel::result::Error;
/// Stateless data-access object for the `member_sites` table.
pub struct Repository {}
impl std::fmt::Debug for Repository {
    /// Writes a fixed, human-readable marker for this repository.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(f, "Repository of member_sites")
    }
}
impl Default for Repository {
    /// Equivalent to [`Repository::new`].
    fn default() -> Self {
        Self::new()
    }
}
impl Repository {
///
pub fn new() -> Repository {
Repository {}
}
///
pub fn select(
&self,
conn: &diesel::PgConnection,
id: uuid::Uuid,
) -> Result<models::MemberSite, Error> {
member_sites::table
.find(id as uuid::Uuid)
.first::<models::MemberSite>(conn)
}
///
pub fn select_by_url(
&self,
conn: &diesel::PgConnection,
url: &str,
) -> Result<Option<models::MemberSite>, Error> {
use member_sites::dsl;
match member_sites::table
.filter(dsl::url.eq(url))
.first::<models::MemberSite>(conn)
{
Ok(m) => Ok(Some(m)),
Err(e) => match e {
diesel::result::Error::NotFound => Ok(None),
_ => Err(e),
},
}
}
///
pub fn select_all_count(
&self,
conn: &diesel::PgConnection,
find_all: models::FindAll,
) -> Result<i64, Error> {
let mut q = member_sites::table.into_boxed();
if let Some(sp) = find_all.url_like {
q = q.filter(member_sites::dsl::url.like(sp));
}
q.count().get_result(conn)
}
///
pub fn select_all(
&self,
conn: &diesel::PgConnection,
find_all: models::FindAll,
) -> Result<Vec<models::MemberSite>, Error> {
let mut q = member_sites::table.into_boxed();
if let Some(sp) = find_all.url_like {
q = q.filter(member_sites::dsl::url.like(sp));
}
if let Some(p) = find_all.pagination {
let page = p.page.unwrap_or(1);
if let Some(page_size) = p.page_size {
q = q.offset(((page - 1) * page_size) as i64);
q = q.limit(page_size as i64);
}
}
if let Some(orderbys) = find_all.sorts {
for s in orderbys {
match s {
bcr::models::pagination::Sort::ASC(property) => match property.as_str() {
"url" => {
q = q.order_by(member_sites::url.asc());
}
"created_at" => {
q = q.order_by(member_sites::created_at.asc());
}
"updated_at" => {
q = q.order_by(member_sites::updated_at.asc());
}
"deleted_at" => {
q = q.order_by(member_sites::deleted_at.asc());
}
_ => {}
},
bcr::models::pagination::Sort::DESC(property) => match property.as_str() {
"url" => {
q = q.order_by(member_sites::url.desc());
}
"created_at" => {
q = q.order_by(member_sites::created_at.desc());
}
"updated_at" => {
q = q.order_by(member_sites::updated_at.desc());
}
"deleted_at" => {
q = q.order_by(member_sites::deleted_at.desc());
}
_ => {}
},
};
}
}
q.load::<models::MemberSite>(conn)
}
}

View File

@ -0,0 +1,18 @@
//!
//!
table! {
    /// Member sites table.
    member_sites(id) {
        /// Primary key.
        id -> Uuid,
        /// Site URL; exact-match lookup key.
        url -> Text,
        /// Row creation time (epoch — presumably seconds).
        created_at -> BigInt,
        /// Row last-update time (epoch).
        updated_at -> BigInt,
        /// Soft-delete time; `NULL` while live.
        deleted_at -> Nullable<BigInt>,
    }
}

6
src/repositories/mod.rs Normal file
View File

@ -0,0 +1,6 @@
/// Captcha persistence.
pub mod captcha;
/// Member persistence.
pub mod member;
/// Member-class persistence.
pub mod member_class;
/// Member-level persistence.
pub mod member_level;
/// Member-session persistence.
pub mod member_session;
/// Member-site persistence.
pub mod member_site;

View File

@ -0,0 +1,2 @@
/// Claims/data models for the identity service.
pub mod models;
/// NATS message handlers for the identity service.
pub mod service;

View File

@ -0,0 +1,19 @@
use serde::{Deserialize, Serialize};
// #[derive(Debug, Serialize, Deserialize)]
// struct Claims {
// aud: String, // Optional. Audience
// exp: usize, // Required (validate_exp defaults to true in validation). Expiration time (as UTC timestamp)
// iat: usize, // Optional. Issued at (as UTC timestamp)
// iss: String, // Optional. Issuer
// nbf: usize, // Optional. Not Before (as UTC timestamp)
// sub: String, // Optional. Subject (whom token refers to)
// }
/// JWT-style claims payload.
///
/// NOTE(review): not referenced from the visible handlers; the exact
/// semantics of `token` are not shown here — TODO confirm intended use.
#[derive(Debug, Serialize, Deserialize)]
struct Claims {
    /// Issuer.
    iss: String,
    /// Issued-at (UTC timestamp — presumably seconds).
    iat: usize,
    /// Expiration (UTC timestamp — presumably seconds).
    exp: usize,
    /// Opaque token carried inside the claims.
    token: String,
}

View File

@ -0,0 +1,653 @@
//!
//!
use std::str::FromStr;
use super::super::super::repositories;
use beteran_common_rust as bcr;
use beteran_protobuf_rust as bpr;
use diesel::{
r2d2::{ConnectionManager, Pool},
PgConnection,
};
use prost::Message;
/// NATS queue-subscriber service implementing member identity flows:
/// username/nickname duplication checks, captcha issuance and sign-in.
pub struct Service {
    /// Async NATS connection used for queue subscriptions and replies.
    connection_broker: nats::asynk::Connection,
    /// Queue group name shared by competing service instances.
    queue_broker: String,
    /// r2d2 pool of PostgreSQL connections.
    pool: Pool<ConnectionManager<PgConnection>>,
    /// Member row access.
    member_repository: repositories::member::repository::Repository,
    /// Member-site row access.
    member_site_repository: repositories::member_site::repository::Repository,
    /// Member-session row access.
    member_session_repository: repositories::member_session::repository::Repository,
    /// Captcha row access.
    captcha_repository: repositories::captcha::repository::Repository,
}
impl std::fmt::Debug for Service {
    /// Writes a fixed, human-readable marker for this service.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(f, "Service of service.member.service.identity")
    }
}
impl Service {
/// Builds a [`Service`] over the given broker connection, queue group and
/// database pool, with fresh repository instances.
pub fn new(
    connection_broker: nats::asynk::Connection,
    queue_broker: String,
    pool: Pool<ConnectionManager<PgConnection>>,
) -> Service {
    Service {
        connection_broker,
        queue_broker,
        pool,
        member_repository: repositories::member::repository::Repository::new(),
        member_site_repository: repositories::member_site::repository::Repository::new(),
        member_session_repository: repositories::member_session::repository::Repository::new(),
        captcha_repository: repositories::captcha::repository::Repository::new(),
    }
}
/// Subscribes all identity handlers and runs them concurrently.
///
/// Returns when the first handler fails (via `try_join!`); otherwise
/// runs for as long as the subscriptions stay open.
pub async fn subscribe(&self) -> std::result::Result<(), std::boxed::Box<dyn std::error::Error>> {
    futures::try_join!(
        self.check_username_for_duplication(),
        self.check_nickname_for_duplication(),
        self.captcha(),
        self.signin(),
    )
    .map(|_| ())
}
async fn check_username_for_duplication(&self) -> Result<(), Box<dyn std::error::Error>> {
let s = self
.connection_broker
.queue_subscribe(
bpr::ss::member::identity::SUBJECT_CHECK_USERNAME_FOR_DUPLICATION,
self.queue_broker.as_str(),
)
.await?;
while let Some(message) = s.next().await {
if let Err(e) = async {
let req = bpr::ss::member::identity::CheckUsernameForDuplicationRequest::decode(
message.data.as_slice(),
)
.map_err(|e| {
bcr::error::rpc::Error::InvalidRequest(bcr::error::rpc::InvalidRequest {
message: format!("invalid request: {}", e),
})
})?;
let client = match req.client {
Some(c) => c,
None => {
return Err(bcr::error::rpc::Error::InvalidParams(
bcr::error::rpc::InvalidParams {
message: "invalid client information".to_string(),
detail: bcr::error::rpc::InvalidParamsDetail {
location: "request".to_string(),
param: "client".to_string(),
value: "".to_string(),
error_type: bcr::error::rpc::InvalidParamsType::Required,
message: "".to_string(),
},
},
));
}
};
let conn = self.pool.get().map_err(|e| {
bcr::error::rpc::Error::Server(bcr::error::rpc::Server {
message: format!("server {}", e),
})
})?;
let m = self
.member_repository
.select_by_username(&conn, &req.username)
.map_err(|e| {
bcr::error::rpc::Error::Server(bcr::error::rpc::Server {
message: format!("server {}", e),
})
})?;
message
.respond(
bpr::ss::member::identity::CheckUsernameForDuplicationResponse {
error: None,
result: Some(
bpr::ss::member::identity::check_username_for_duplication_response::Result {
duplicated: m.is_some(),
},
),
}
.encode_to_vec(),
)
.await
.map_err(|e| {
bcr::error::rpc::Error::Server(bcr::error::rpc::Server {
message: format!("server: {}", e),
})
})?;
Ok::<(), bcr::error::rpc::Error>(())
}
.await
{
message
.respond(
bpr::ss::member::identity::CheckUsernameForDuplicationResponse {
error: Some(bpr::protobuf::rpc::Error::from(e)),
result: None,
}
.encode_to_vec(),
)
.await?;
}
}
Ok(())
}
async fn check_nickname_for_duplication(&self) -> Result<(), Box<dyn std::error::Error>> {
let s = self
.connection_broker
.queue_subscribe(
bpr::ss::member::identity::SUBJECT_CHECK_NICKNAME_FOR_DUPLICATION,
self.queue_broker.as_str(),
)
.await?;
while let Some(message) = s.next().await {
if let Err(e) = async {
let req = bpr::ss::member::identity::CheckNicknameForDuplicationRequest::decode(
message.data.as_slice(),
)
.map_err(|e| {
bcr::error::rpc::Error::InvalidRequest(bcr::error::rpc::InvalidRequest {
message: format!("invalid request: {}", e),
})
})?;
let client = match req.client {
Some(c) => c,
None => {
return Err(bcr::error::rpc::Error::InvalidParams(
bcr::error::rpc::InvalidParams {
message: "invalid client information".to_string(),
detail: bcr::error::rpc::InvalidParamsDetail {
location: "request".to_string(),
param: "client".to_string(),
value: "".to_string(),
error_type: bcr::error::rpc::InvalidParamsType::Required,
message: "".to_string(),
},
},
));
}
};
let conn = self.pool.get().map_err(|e| {
bcr::error::rpc::Error::Server(bcr::error::rpc::Server {
message: format!("server {}", e),
})
})?;
let m = self
.member_repository
.select_by_nickname(&conn, &req.nickname)
.map_err(|e| {
bcr::error::rpc::Error::Server(bcr::error::rpc::Server {
message: format!("server: {}", e),
})
})?;
message
.respond(
bpr::ss::member::identity::CheckNicknameForDuplicationResponse {
error: None,
result: Some(
bpr::ss::member::identity::check_nickname_for_duplication_response::Result {
duplicated: m.is_some(),
},
),
}
.encode_to_vec(),
)
.await
.map_err(|e| {
bcr::error::rpc::Error::Server(bcr::error::rpc::Server {
message: format!("server: {}", e),
})
})?;
Ok::<(), bcr::error::rpc::Error>(())
}
.await
{
message
.respond(
bpr::ss::member::identity::CheckNicknameForDuplicationResponse {
error: Some(bpr::protobuf::rpc::Error::from(e)),
result: None,
}
.encode_to_vec(),
)
.await?;
}
}
Ok(())
}
/// Serves `SUBJECT_CAPTCHA`: generates a captcha image, stores its security
/// code with a 2-hour expiry, and replies with the base64 image plus the
/// stored row id as a verification token.
async fn captcha(&self) -> Result<(), Box<dyn std::error::Error>> {
    let s = self
        .connection_broker
        .queue_subscribe(
            bpr::ss::member::identity::SUBJECT_CAPTCHA,
            self.queue_broker.as_str(),
        )
        .await?;
    while let Some(message) = s.next().await {
        // Per-message errors are sent back to the requester; the loop keeps
        // serving subsequent messages.
        if let Err(e) = async {
            let req = bpr::ss::member::identity::CaptchaRequest::decode(message.data.as_slice())
                .map_err(|e| {
                    bcr::error::rpc::Error::InvalidRequest(bcr::error::rpc::InvalidRequest {
                        message: format!("invalid request: {}", e),
                    })
                })?;
            let client = match req.client {
                Some(c) => c,
                None => {
                    return Err(bcr::error::rpc::Error::InvalidParams(
                        bcr::error::rpc::InvalidParams {
                            message: "invalid client information".to_string(),
                            detail: bcr::error::rpc::InvalidParamsDetail {
                                location: "request".to_string(),
                                param: "client".to_string(),
                                value: "".to_string(),
                                error_type: bcr::error::rpc::InvalidParamsType::Required,
                                message: "".to_string(),
                            },
                        },
                    ));
                }
            };
            let site_url = match client.site_url {
                Some(site_url) => site_url,
                None => {
                    return Err(bcr::error::rpc::Error::InvalidParams(
                        bcr::error::rpc::InvalidParams {
                            message: "invalid site_url information".to_string(),
                            detail: bcr::error::rpc::InvalidParamsDetail {
                                location: "request".to_string(),
                                param: "client.site_url".to_string(),
                                value: "".to_string(),
                                error_type: bcr::error::rpc::InvalidParamsType::Required,
                                message: "".to_string(),
                            },
                        },
                    ));
                }
            };
            let conn = self.pool.get().map_err(|e| {
                bcr::error::rpc::Error::Server(bcr::error::rpc::Server {
                    message: format!("server {}", e),
                })
            })?;
            // Existence check only: the matched site row itself is discarded.
            match self
                .member_site_repository
                .select_by_url(&conn, &site_url)
                .map_err(|e| {
                    bcr::error::rpc::Error::Server(bcr::error::rpc::Server {
                        message: format!("server {}", e),
                    })
                })? {
                Some(ms) => ms,
                None => {
                    return Err(bcr::error::rpc::Error::InvalidParams(
                        bcr::error::rpc::InvalidParams {
                            message: "invalid site_url information".to_string(),
                            detail: bcr::error::rpc::InvalidParamsDetail {
                                location: "request".to_string(),
                                param: "client.site_url".to_string(),
                                value: "".to_string(),
                                error_type: bcr::error::rpc::InvalidParamsType::None,
                                message: "".to_string(),
                            },
                        },
                    ));
                }
            };
            // 220x120 captcha image with light noise.
            let mut c = captcha::Captcha::new();
            let c = c
                .apply_filter(captcha::filters::Noise::new(0.1))
                .view(220, 120);
            let image_as_base64 = match c.as_base64() {
                Some(s) => s,
                None => {
                    return Err(bcr::error::rpc::Error::Server(bcr::error::rpc::Server {
                        message: "captcha image encoding error[base64]".to_string(),
                    }));
                }
            };
            let security_code = c.chars_as_string();
            // Captcha answers stay valid for two hours.
            let expires_at = (chrono::Utc::now() + chrono::Duration::hours(2)).timestamp();
            let new_captcha = repositories::captcha::models::NewCaptcha {
                security_code,
                expires_at,
            };
            let inserted_captcha = self
                .captcha_repository
                .insert(&conn, &new_captcha)
                .map_err(|e| {
                    bcr::error::rpc::Error::Server(bcr::error::rpc::Server {
                        message: format!("server: {}", e),
                    })
                })?;
            // The stored row id doubles as the client-visible captcha token.
            message
                .respond(
                    bpr::ss::member::identity::CaptchaResponse {
                        error: None,
                        result: Some(bpr::ss::member::identity::captcha_response::Result {
                            token: inserted_captcha.id.to_string(),
                            image: image_as_base64,
                        }),
                    }
                    .encode_to_vec(),
                )
                .await
                .map_err(|e| {
                    bcr::error::rpc::Error::Server(bcr::error::rpc::Server {
                        message: format!("server: {}", e),
                    })
                })?;
            Ok::<(), bcr::error::rpc::Error>(())
        }
        .await
        {
            message
                .respond(
                    bpr::ss::member::identity::CaptchaResponse {
                        error: Some(bpr::protobuf::rpc::Error::from(e)),
                        result: None,
                    }
                    .encode_to_vec(),
                )
                .await?;
        }
    }
    Ok(())
}
async fn signin(&self) -> Result<(), Box<dyn std::error::Error>> {
let s = self
.connection_broker
.queue_subscribe(
bpr::ss::member::identity::SUBJECT_SIGNIN,
self.queue_broker.as_str(),
)
.await?;
while let Some(message) = s.next().await {
if let Err(e) = async {
let req = bpr::ss::member::identity::SigninRequest::decode(message.data.as_slice())
.map_err(|e| {
bcr::error::rpc::Error::InvalidRequest(bcr::error::rpc::InvalidRequest {
message: format!("invalid request: {}", e),
})
})?;
let client = match req.client {
Some(c) => c,
None => {
return Err(bcr::error::rpc::Error::InvalidParams(
bcr::error::rpc::InvalidParams {
message: "invalid client information".to_string(),
detail: bcr::error::rpc::InvalidParamsDetail {
location: "request".to_string(),
param: "client".to_string(),
value: "".to_string(),
error_type: bcr::error::rpc::InvalidParamsType::Required,
message: "".to_string(),
},
},
));
}
};
let conn = self.pool.get().map_err(|e| {
bcr::error::rpc::Error::Server(bcr::error::rpc::Server {
message: format!("server {}", e),
})
})?;
let ms = match self
.member_site_repository
.select_by_url(&conn, client.site_url())
.map_err(|e| {
bcr::error::rpc::Error::Server(bcr::error::rpc::Server {
message: format!("server: {}", e),
})
})? {
Some(ms) => ms,
None => {
return Err(bcr::error::rpc::Error::InvalidParams(
bcr::error::rpc::InvalidParams {
message: "invalid site_url information".to_string(),
detail: bcr::error::rpc::InvalidParamsDetail {
location: "request".to_string(),
param: "client.site_url".to_string(),
value: "".to_string(),
error_type: bcr::error::rpc::InvalidParamsType::None,
message: "".to_string(),
},
},
));
}
};
let captcha_id = uuid::Uuid::from_str(req.token.as_str()).map_err(|e| {
bcr::error::rpc::Error::InvalidParams(bcr::error::rpc::InvalidParams {
message: "invalid captcha token".to_string(),
detail: bcr::error::rpc::InvalidParamsDetail {
location: "request".to_string(),
param: "token".to_string(),
value: "".to_string(),
error_type: bcr::error::rpc::InvalidParamsType::None,
message: e.to_string(),
},
})
})?;
let security_code = req.security_code;
let username = req.username;
let password = req.password;
let captcha = match self
.captcha_repository
.select(&conn, captcha_id)
.map_err(|e| {
bcr::error::rpc::Error::Server(bcr::error::rpc::Server {
message: format!("server: {}", e),
})
})? {
Some(c) => c,
None => {
return Err(bcr::error::rpc::Error::InvalidParams(
bcr::error::rpc::InvalidParams {
message: "invalid captcha token".to_string(),
detail: bcr::error::rpc::InvalidParamsDetail {
location: "request".to_string(),
param: "token".to_string(),
value: "".to_string(),
error_type: bcr::error::rpc::InvalidParamsType::None,
message: "".to_string(),
},
},
));
}
};
if !captcha.expires_at < chrono::Utc::now().timestamp() {
return Err(bcr::error::rpc::Error::InvalidParams(
bcr::error::rpc::InvalidParams {
message: "invalid captcha token".to_string(),
detail: bcr::error::rpc::InvalidParamsDetail {
location: "request".to_string(),
param: "token".to_string(),
value: "".to_string(),
error_type: bcr::error::rpc::InvalidParamsType::None,
message: "captcha token is expired".to_string(),
},
},
));
}
if !captcha.security_code.eq(&security_code) {
return Err(bcr::error::rpc::Error::InvalidParams(
bcr::error::rpc::InvalidParams {
message: "invalid security_code".to_string(),
detail: bcr::error::rpc::InvalidParamsDetail {
location: "request".to_string(),
param: "token".to_string(),
value: security_code.clone(),
error_type: bcr::error::rpc::InvalidParamsType::EqualsTo,
message: "security_code must equal to captcha".to_string(),
},
},
));
}
let m = match self
.member_repository
.select_by_username(&conn, &username)
.map_err(|e| {
bcr::error::rpc::Error::Server(bcr::error::rpc::Server {
message: format!("server: {}", e),
})
})? {
Some(m) => m,
None => {
return Err(bcr::error::rpc::Error::InvalidParams(
bcr::error::rpc::InvalidParams {
message: "invalid username".to_string(),
detail: bcr::error::rpc::InvalidParamsDetail {
location: "request".to_string(),
param: "username".to_string(),
value: username,
error_type: bcr::error::rpc::InvalidParamsType::None,
message: "".to_string(),
},
},
));
}
};
if m.member_site_id != ms.id {
return Err(bcr::error::rpc::Error::InvalidParams(
bcr::error::rpc::InvalidParams {
message: "invalid site_url".to_string(),
detail: bcr::error::rpc::InvalidParamsDetail {
location: "header".to_string(),
param: "client.site_url".to_string(),
value: client.site_url().to_string(),
error_type: bcr::error::rpc::InvalidParamsType::EqualsTo,
message: "".to_string(),
},
},
));
}
if !(argon2::verify_encoded(&m.password, password.as_bytes()).map_err(|e| {
bcr::error::rpc::Error::InvalidParams(bcr::error::rpc::InvalidParams {
message: "invalid password".to_string(),
detail: bcr::error::rpc::InvalidParamsDetail {
location: "request".to_string(),
param: "password".to_string(),
value: password.clone(),
error_type: bcr::error::rpc::InvalidParamsType::None,
message: e.to_string(),
},
})
})?) {
return Err(bcr::error::rpc::Error::InvalidParams(
bcr::error::rpc::InvalidParams {
message: "invalid password".to_string(),
detail: bcr::error::rpc::InvalidParamsDetail {
location: "request".to_string(),
param: "password".to_string(),
value: password.clone(),
error_type: bcr::error::rpc::InvalidParamsType::EqualsTo,
message: "".to_string(),
},
},
));
}
let expires_at = (chrono::Utc::now() + chrono::Duration::minutes(30)).timestamp();
let session = self
.member_session_repository
.insert(
&conn,
&repositories::member_session::models::NewMemberSession {
member_id: m.id,
ip: client.client_ip.clone(),
expires_at,
},
)
.map_err(|e| {
bcr::error::rpc::Error::Server(bcr::error::rpc::Server {
message: format!("server: {}", e),
})
})?;
message
.respond(
bpr::ss::member::identity::SigninResponse {
error: None,
result: Some(bpr::ss::member::identity::signin_response::Result {
session_id: session.id.to_string(),
}),
}
.encode_to_vec(),
)
.await
.map_err(|e| {
bcr::error::rpc::Error::Server(bcr::error::rpc::Server {
message: format!("server: {}", e),
})
})?;
Ok::<(), bcr::error::rpc::Error>(())
}
.await
{
message
.respond(
bpr::ss::member::identity::SigninResponse {
error: Some(bpr::protobuf::rpc::Error::from(e)),
result: None,
}
.encode_to_vec(),
)
.await?;
}
}
Ok(())
}
}

1
src/services/mod.rs Normal file
View File

@ -0,0 +1 @@
/// Member identity service (sign-in, captcha, duplication checks).
pub mod identity;