initialized
parent 3ed9708793
commit abf0478dac
33  .devcontainer/Dockerfile  Normal file
@@ -0,0 +1,33 @@
# Note: You can use any Debian/Ubuntu based image you want.
FROM mcr.microsoft.com/vscode/devcontainers/base:0-bullseye

# [Option] Install zsh
ARG INSTALL_ZSH="true"
# [Option] Upgrade OS packages to their latest versions
ARG UPGRADE_PACKAGES="false"
# [Option] Enable non-root Docker access in container
ARG ENABLE_NONROOT_DOCKER="true"
# [Option] Use the OSS Moby CLI instead of the licensed Docker CLI
ARG USE_MOBY="true"

# Enable new "BUILDKIT" mode for Docker CLI
ENV DOCKER_BUILDKIT=1

# Install needed packages and setup non-root user. Use a separate RUN statement to add your
# own dependencies. A user of "automatic" attempts to reuse a user ID if one already exists.
ARG USERNAME=automatic
ARG USER_UID=1000
ARG USER_GID=$USER_UID
COPY library-scripts/*.sh /tmp/library-scripts/
RUN apt-get update \
    && /bin/bash /tmp/library-scripts/common-debian.sh "${INSTALL_ZSH}" "${USERNAME}" "${USER_UID}" "${USER_GID}" "${UPGRADE_PACKAGES}" "true" "true" \
    # Use Docker script from script library to set things up
    && /bin/bash /tmp/library-scripts/docker-debian.sh "${ENABLE_NONROOT_DOCKER}" "/var/run/docker-host.sock" "/var/run/docker.sock" "${USERNAME}" \
    # Clean up
    && apt-get autoremove -y && apt-get clean -y && rm -rf /var/lib/apt/lists/* /tmp/library-scripts/

# Setting the ENTRYPOINT to docker-init.sh will configure non-root access
# to the Docker socket. The script will also execute CMD as needed.
ENTRYPOINT [ "/usr/local/share/docker-init.sh" ]
CMD [ "sleep", "infinity" ]
64  .devcontainer/devcontainer.json  Normal file
@@ -0,0 +1,64 @@
// For format details, see https://aka.ms/devcontainer.json. For config options, see the README at:
// https://github.com/microsoft/vscode-dev-containers/tree/v0.209.6/containers/docker-from-docker-compose
{
  "name": "beteran-protobuf",
  "dockerComposeFile": "docker-compose.yml",
  "service": "app",
  "workspaceFolder": "/workspace",

  // Use this environment variable if you need to bind mount your local source code into a new container.
  "remoteEnv": {
    "LOCAL_WORKSPACE_FOLDER": "${localWorkspaceFolder}"
  },

  // Configure tool-specific properties.
  "customizations": {
    // Configure properties specific to VS Code.
    "vscode": {
      // Set *default* container specific settings.json values on container create.
      "settings": {
        "lldb.verboseLogging": true,
        "lldb.executable": "/usr/bin/lldb",
        "search.exclude": {
          "**/target": true
        },
        // Keep VS Code from watching files under ./target
        "files.watcherExclude": {
          "**/target/**": true
        },
        "rust-analyzer.checkOnSave.command": "clippy",
        "protoc": {
          "options": [
            "--proto_path=/usr/local/include/protobuf"
          ]
        },
        "editor.tabSize": 2,
        "editor.insertSpaces": true,
        "editor.formatOnSave": true
      },
      // Add the IDs of extensions you want installed when the container is created.
      "extensions": [
        "dbaeumer.vscode-eslint",
        "donjayamanne.githistory",
        "eamodio.gitlens",
        "matklad.rust-analyzer",
        "mhutchie.git-graph",
        "ms-azuretools.vscode-docker",
        "mutantdino.resourcemonitor",
        "serayuzgur.crates",
        "tamasfe.even-better-toml",
        "vadimcn.vscode-lldb",
        "zxh404.vscode-proto3"
      ]
    }
  },

  // Use 'forwardPorts' to make a list of ports inside the container available locally.
  // "forwardPorts": [],

  // Use 'postCreateCommand' to run commands after the container is created.
  // "postCreateCommand": "docker --version",
  "postCreateCommand": "bash ./.devcontainer/scripts/postCreateCommand.sh",

  // Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root.
  "remoteUser": "vscode",

  "features": {
    "node": "lts",
    "rust": "latest"
  }
}
32  .devcontainer/docker-compose.yml  Normal file
@@ -0,0 +1,32 @@
version: '3'

services:
  app:
    build:
      context: .
      dockerfile: Dockerfile
      args:
        UPGRADE_PACKAGES: "true"

    volumes:
      # Forwards the local Docker socket to the container.
      - /var/run/docker.sock:/var/run/docker-host.sock
      # Update this to wherever you want VS Code to mount the folder of your project
      - ..:/workspace:cached

    # Overrides default command so things don't shut down after the process ends.
    entrypoint: /usr/local/share/docker-init.sh
    command: sleep infinity

    # Uncomment the next four lines if you will use a ptrace-based debugger like C++, Go, and Rust.
    # cap_add:
    #   - SYS_PTRACE
    # security_opt:
    #   - seccomp:unconfined

    # Uncomment the next line to use a non-root user for all processes.
    # user: vscode

    # Use "forwardPorts" in **devcontainer.json** to forward an app port locally.
    # (Adding the "ports" property to this file will not forward from a Codespace.)

    environment:
      - PROTOBUF_VERSION=3.20.0
454  .devcontainer/library-scripts/common-debian.sh  Normal file
@@ -0,0 +1,454 @@
#!/usr/bin/env bash
#-------------------------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
#-------------------------------------------------------------------------------------------------------------
#
# Docs: https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/common.md
# Maintainer: The VS Code and Codespaces Teams
#
# Syntax: ./common-debian.sh [install zsh flag] [username] [user UID] [user GID] [upgrade packages flag] [install Oh My Zsh! flag] [Add non-free packages]

set -e

INSTALL_ZSH=${1:-"true"}
USERNAME=${2:-"automatic"}
USER_UID=${3:-"automatic"}
USER_GID=${4:-"automatic"}
UPGRADE_PACKAGES=${5:-"true"}
INSTALL_OH_MYS=${6:-"true"}
ADD_NON_FREE_PACKAGES=${7:-"false"}
SCRIPT_DIR="$(cd $(dirname "${BASH_SOURCE[0]}") && pwd)"
MARKER_FILE="/usr/local/etc/vscode-dev-containers/common"

if [ "$(id -u)" -ne 0 ]; then
    echo -e 'Script must be run as root. Use sudo, su, or add "USER root" to your Dockerfile before running this script.'
    exit 1
fi

# Ensure that login shells get the correct path if the user updated the PATH using ENV.
rm -f /etc/profile.d/00-restore-env.sh
echo "export PATH=${PATH//$(sh -lc 'echo $PATH')/\$PATH}" > /etc/profile.d/00-restore-env.sh
chmod +x /etc/profile.d/00-restore-env.sh

# If in automatic mode, determine if a user already exists, if not use vscode
if [ "${USERNAME}" = "auto" ] || [ "${USERNAME}" = "automatic" ]; then
    USERNAME=""
    POSSIBLE_USERS=("vscode" "node" "codespace" "$(awk -v val=1000 -F ":" '$3==val{print $1}' /etc/passwd)")
    for CURRENT_USER in ${POSSIBLE_USERS[@]}; do
        if id -u ${CURRENT_USER} > /dev/null 2>&1; then
            USERNAME=${CURRENT_USER}
            break
        fi
    done
    if [ "${USERNAME}" = "" ]; then
        USERNAME=vscode
    fi
elif [ "${USERNAME}" = "none" ]; then
    USERNAME=root
    USER_UID=0
    USER_GID=0
fi

# Load markers to see which steps have already run
if [ -f "${MARKER_FILE}" ]; then
    echo "Marker file found:"
    cat "${MARKER_FILE}"
    source "${MARKER_FILE}"
fi

# Ensure apt is in non-interactive mode to avoid prompts
export DEBIAN_FRONTEND=noninteractive

# Function to call apt-get update if needed
apt_get_update_if_needed()
{
    if [ ! -d "/var/lib/apt/lists" ] || [ "$(ls /var/lib/apt/lists/ | wc -l)" = "0" ]; then
        echo "Running apt-get update..."
        apt-get update
    else
        echo "Skipping apt-get update."
    fi
}

# Run install apt-utils to avoid debconf warning then verify presence of other common developer tools and dependencies
if [ "${PACKAGES_ALREADY_INSTALLED}" != "true" ]; then

    package_list="apt-utils \
        openssh-client \
        gnupg2 \
        dirmngr \
        iproute2 \
        procps \
        lsof \
        htop \
        net-tools \
        psmisc \
        curl \
        wget \
        rsync \
        ca-certificates \
        unzip \
        zip \
        nano \
        vim-tiny \
        less \
        jq \
        lsb-release \
        apt-transport-https \
        dialog \
        libc6 \
        libgcc1 \
        libkrb5-3 \
        libgssapi-krb5-2 \
        libicu[0-9][0-9] \
        liblttng-ust0 \
        libstdc++6 \
        zlib1g \
        locales \
        sudo \
        ncdu \
        man-db \
        strace \
        manpages \
        manpages-dev \
        init-system-helpers"

    # Needed for adding manpages-posix and manpages-posix-dev which are non-free packages in Debian
    if [ "${ADD_NON_FREE_PACKAGES}" = "true" ]; then
        # Bring in variables from /etc/os-release like VERSION_CODENAME
        . /etc/os-release
        sed -i -E "s/deb http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME} main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME} main contrib non-free/" /etc/apt/sources.list
        sed -i -E "s/deb-src http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME} main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME} main contrib non-free/" /etc/apt/sources.list
        sed -i -E "s/deb http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME}-updates main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME}-updates main contrib non-free/" /etc/apt/sources.list
        sed -i -E "s/deb-src http:\/\/(deb|httpredir)\.debian\.org\/debian ${VERSION_CODENAME}-updates main/deb http:\/\/\1\.debian\.org\/debian ${VERSION_CODENAME}-updates main contrib non-free/" /etc/apt/sources.list
        sed -i "s/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main contrib non-free/" /etc/apt/sources.list
        sed -i "s/deb-src http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}\/updates main contrib non-free/" /etc/apt/sources.list
        sed -i "s/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main contrib non-free/" /etc/apt/sources.list
        sed -i "s/deb-src http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main/deb http:\/\/deb\.debian\.org\/debian ${VERSION_CODENAME}-backports main contrib non-free/" /etc/apt/sources.list
        # Handle bullseye location for security https://www.debian.org/releases/bullseye/amd64/release-notes/ch-information.en.html
        sed -i "s/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main contrib non-free/" /etc/apt/sources.list
        sed -i "s/deb-src http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main/deb http:\/\/security\.debian\.org\/debian-security ${VERSION_CODENAME}-security main contrib non-free/" /etc/apt/sources.list
        echo "Running apt-get update..."
        apt-get update
        package_list="${package_list} manpages-posix manpages-posix-dev"
    else
        apt_get_update_if_needed
    fi

    # Install libssl1.1 if available
    if [[ ! -z $(apt-cache --names-only search ^libssl1.1$) ]]; then
        package_list="${package_list} libssl1.1"
    fi

    # Install appropriate version of libssl1.0.x if available
    libssl_package=$(dpkg-query -f '${db:Status-Abbrev}\t${binary:Package}\n' -W 'libssl1\.0\.?' 2>&1 || echo '')
    if [ "$(echo "$libssl_package" | grep -o 'libssl1\.0\.[0-9]:' | uniq | sort | wc -l)" -eq 0 ]; then
        if [[ ! -z $(apt-cache --names-only search ^libssl1.0.2$) ]]; then
            # Debian 9
            package_list="${package_list} libssl1.0.2"
        elif [[ ! -z $(apt-cache --names-only search ^libssl1.0.0$) ]]; then
            # Ubuntu 18.04, 16.04, earlier
            package_list="${package_list} libssl1.0.0"
        fi
    fi

    echo "Packages to verify are installed: ${package_list}"
    apt-get -y install --no-install-recommends ${package_list} 2> >( grep -v 'debconf: delaying package configuration, since apt-utils is not installed' >&2 )

    # Install git if not already installed (may be more recent than distro version)
    if ! type git > /dev/null 2>&1; then
        apt-get -y install --no-install-recommends git
    fi

    PACKAGES_ALREADY_INSTALLED="true"
fi

# Get to latest versions of all packages
if [ "${UPGRADE_PACKAGES}" = "true" ]; then
    apt_get_update_if_needed
    apt-get -y upgrade --no-install-recommends
    apt-get autoremove -y
fi

# Ensure at least the en_US.UTF-8 UTF-8 locale is available.
# Common need for both applications and things like the agnoster ZSH theme.
if [ "${LOCALE_ALREADY_SET}" != "true" ] && ! grep -o -E '^\s*en_US.UTF-8\s+UTF-8' /etc/locale.gen > /dev/null; then
    echo "en_US.UTF-8 UTF-8" >> /etc/locale.gen
    locale-gen
    LOCALE_ALREADY_SET="true"
fi

# Create or update a non-root user to match UID/GID.
group_name="${USERNAME}"
if id -u ${USERNAME} > /dev/null 2>&1; then
    # User exists, update if needed
    if [ "${USER_GID}" != "automatic" ] && [ "$USER_GID" != "$(id -g $USERNAME)" ]; then
        group_name="$(id -gn $USERNAME)"
        groupmod --gid $USER_GID ${group_name}
        usermod --gid $USER_GID $USERNAME
    fi
    if [ "${USER_UID}" != "automatic" ] && [ "$USER_UID" != "$(id -u $USERNAME)" ]; then
        usermod --uid $USER_UID $USERNAME
    fi
else
    # Create user
    if [ "${USER_GID}" = "automatic" ]; then
        groupadd $USERNAME
    else
        groupadd --gid $USER_GID $USERNAME
    fi
    if [ "${USER_UID}" = "automatic" ]; then
        useradd -s /bin/bash --gid $USERNAME -m $USERNAME
    else
        useradd -s /bin/bash --uid $USER_UID --gid $USERNAME -m $USERNAME
    fi
fi

# Add sudo support for the non-root user
if [ "${USERNAME}" != "root" ] && [ "${EXISTING_NON_ROOT_USER}" != "${USERNAME}" ]; then
    echo $USERNAME ALL=\(root\) NOPASSWD:ALL > /etc/sudoers.d/$USERNAME
    chmod 0440 /etc/sudoers.d/$USERNAME
    EXISTING_NON_ROOT_USER="${USERNAME}"
fi

# ** Shell customization section **
if [ "${USERNAME}" = "root" ]; then
    user_rc_path="/root"
else
    user_rc_path="/home/${USERNAME}"
fi

# Restore user .bashrc defaults from skeleton file if it doesn't exist or is empty
if [ ! -f "${user_rc_path}/.bashrc" ] || [ ! -s "${user_rc_path}/.bashrc" ] ; then
    cp /etc/skel/.bashrc "${user_rc_path}/.bashrc"
fi

# Restore user .profile defaults from skeleton file if it doesn't exist or is empty
if [ ! -f "${user_rc_path}/.profile" ] || [ ! -s "${user_rc_path}/.profile" ] ; then
    cp /etc/skel/.profile "${user_rc_path}/.profile"
fi

# .bashrc/.zshrc snippet
rc_snippet="$(cat << 'EOF'

if [ -z "${USER}" ]; then export USER=$(whoami); fi
if [[ "${PATH}" != *"$HOME/.local/bin"* ]]; then export PATH="${PATH}:$HOME/.local/bin"; fi

# Display optional first run image specific notice if configured and terminal is interactive
if [ -t 1 ] && [[ "${TERM_PROGRAM}" = "vscode" || "${TERM_PROGRAM}" = "codespaces" ]] && [ ! -f "$HOME/.config/vscode-dev-containers/first-run-notice-already-displayed" ]; then
    if [ -f "/usr/local/etc/vscode-dev-containers/first-run-notice.txt" ]; then
        cat "/usr/local/etc/vscode-dev-containers/first-run-notice.txt"
    elif [ -f "/workspaces/.codespaces/shared/first-run-notice.txt" ]; then
        cat "/workspaces/.codespaces/shared/first-run-notice.txt"
    fi
    mkdir -p "$HOME/.config/vscode-dev-containers"
    # Mark first run notice as displayed after 10s to avoid problems with fast terminal refreshes hiding it
    ((sleep 10s; touch "$HOME/.config/vscode-dev-containers/first-run-notice-already-displayed") &)
fi

# Set the default git editor if not already set
if [ -z "$(git config --get core.editor)" ] && [ -z "${GIT_EDITOR}" ]; then
    if [ "${TERM_PROGRAM}" = "vscode" ]; then
        if [[ -n $(command -v code-insiders) && -z $(command -v code) ]]; then
            export GIT_EDITOR="code-insiders --wait"
        else
            export GIT_EDITOR="code --wait"
        fi
    fi
fi

EOF
)"

# code shim: falls back to code-insiders if code is not available
cat << 'EOF' > /usr/local/bin/code
#!/bin/sh

get_in_path_except_current() {
    which -a "$1" | grep -A1 "$0" | grep -v "$0"
}

code="$(get_in_path_except_current code)"

if [ -n "$code" ]; then
    exec "$code" "$@"
elif [ "$(command -v code-insiders)" ]; then
    exec code-insiders "$@"
else
    echo "code or code-insiders is not installed" >&2
    exit 127
fi
EOF
chmod +x /usr/local/bin/code

# systemctl shim - tells people to use 'service' if systemd is not running
cat << 'EOF' > /usr/local/bin/systemctl
#!/bin/sh
set -e
if [ -d "/run/systemd/system" ]; then
    exec /bin/systemctl "$@"
else
    echo '\n"systemd" is not running in this container due to its overhead.\nUse the "service" command to start services instead. e.g.: \n\nservice --status-all'
fi
EOF
chmod +x /usr/local/bin/systemctl

# Codespaces bash and OMZ themes - partly inspired by https://github.com/ohmyzsh/ohmyzsh/blob/master/themes/robbyrussell.zsh-theme
codespaces_bash="$(cat \
<<'EOF'

# Codespaces bash prompt theme
__bash_prompt() {
    local userpart='`export XIT=$? \
        && [ ! -z "${GITHUB_USER}" ] && echo -n "\[\033[0;32m\]@${GITHUB_USER} " || echo -n "\[\033[0;32m\]\u " \
        && [ "$XIT" -ne "0" ] && echo -n "\[\033[1;31m\]➜" || echo -n "\[\033[0m\]➜"`'
    local gitbranch='`\
        if [ "$(git config --get codespaces-theme.hide-status 2>/dev/null)" != 1 ]; then \
            export BRANCH=$(git symbolic-ref --short HEAD 2>/dev/null || git rev-parse --short HEAD 2>/dev/null); \
            if [ "${BRANCH}" != "" ]; then \
                echo -n "\[\033[0;36m\](\[\033[1;31m\]${BRANCH}" \
                && if git ls-files --error-unmatch -m --directory --no-empty-directory -o --exclude-standard ":/*" > /dev/null 2>&1; then \
                        echo -n " \[\033[1;33m\]✗"; \
                fi \
                && echo -n "\[\033[0;36m\]) "; \
            fi; \
        fi`'
    local lightblue='\[\033[1;34m\]'
    local removecolor='\[\033[0m\]'
    PS1="${userpart} ${lightblue}\w ${gitbranch}${removecolor}\$ "
    unset -f __bash_prompt
}
__bash_prompt

EOF
)"

codespaces_zsh="$(cat \
<<'EOF'
# Codespaces zsh prompt theme
__zsh_prompt() {
    local prompt_username
    if [ ! -z "${GITHUB_USER}" ]; then
        prompt_username="@${GITHUB_USER}"
    else
        prompt_username="%n"
    fi
    PROMPT="%{$fg[green]%}${prompt_username} %(?:%{$reset_color%}➜ :%{$fg_bold[red]%}➜ )" # User/exit code arrow
    PROMPT+='%{$fg_bold[blue]%}%(5~|%-1~/…/%3~|%4~)%{$reset_color%} ' # cwd
    PROMPT+='$([ "$(git config --get codespaces-theme.hide-status 2>/dev/null)" != 1 ] && git_prompt_info)' # Git status
    PROMPT+='%{$fg[white]%}$ %{$reset_color%}'
    unset -f __zsh_prompt
}
ZSH_THEME_GIT_PROMPT_PREFIX="%{$fg_bold[cyan]%}(%{$fg_bold[red]%}"
ZSH_THEME_GIT_PROMPT_SUFFIX="%{$reset_color%} "
ZSH_THEME_GIT_PROMPT_DIRTY=" %{$fg_bold[yellow]%}✗%{$fg_bold[cyan]%})"
ZSH_THEME_GIT_PROMPT_CLEAN="%{$fg_bold[cyan]%})"
__zsh_prompt

EOF
)"

# Add RC snippet and custom bash prompt
if [ "${RC_SNIPPET_ALREADY_ADDED}" != "true" ]; then
    echo "${rc_snippet}" >> /etc/bash.bashrc
    echo "${codespaces_bash}" >> "${user_rc_path}/.bashrc"
    echo 'export PROMPT_DIRTRIM=4' >> "${user_rc_path}/.bashrc"
    if [ "${USERNAME}" != "root" ]; then
        echo "${codespaces_bash}" >> "/root/.bashrc"
        echo 'export PROMPT_DIRTRIM=4' >> "/root/.bashrc"
    fi
    chown ${USERNAME}:${group_name} "${user_rc_path}/.bashrc"
    RC_SNIPPET_ALREADY_ADDED="true"
fi

# Optionally install and configure zsh and Oh My Zsh!
if [ "${INSTALL_ZSH}" = "true" ]; then
    if ! type zsh > /dev/null 2>&1; then
        apt_get_update_if_needed
        apt-get install -y zsh
    fi
    if [ "${ZSH_ALREADY_INSTALLED}" != "true" ]; then
        echo "${rc_snippet}" >> /etc/zsh/zshrc
        ZSH_ALREADY_INSTALLED="true"
    fi

    # Adapted, simplified inline Oh My Zsh! install steps that default to a codespaces theme.
    # See https://github.com/ohmyzsh/ohmyzsh/blob/master/tools/install.sh for the official script.
    oh_my_install_dir="${user_rc_path}/.oh-my-zsh"
    if [ ! -d "${oh_my_install_dir}" ] && [ "${INSTALL_OH_MYS}" = "true" ]; then
        template_path="${oh_my_install_dir}/templates/zshrc.zsh-template"
        user_rc_file="${user_rc_path}/.zshrc"
        umask g-w,o-w
        mkdir -p ${oh_my_install_dir}
        git clone --depth=1 \
            -c core.eol=lf \
            -c core.autocrlf=false \
            -c fsck.zeroPaddedFilemode=ignore \
            -c fetch.fsck.zeroPaddedFilemode=ignore \
            -c receive.fsck.zeroPaddedFilemode=ignore \
            "https://github.com/ohmyzsh/ohmyzsh" "${oh_my_install_dir}" 2>&1
        echo -e "$(cat "${template_path}")\nDISABLE_AUTO_UPDATE=true\nDISABLE_UPDATE_PROMPT=true" > ${user_rc_file}
        sed -i -e 's/ZSH_THEME=.*/ZSH_THEME="codespaces"/g' ${user_rc_file}

        mkdir -p ${oh_my_install_dir}/custom/themes
        echo "${codespaces_zsh}" > "${oh_my_install_dir}/custom/themes/codespaces.zsh-theme"
        # Shrink git while still enabling updates
        cd "${oh_my_install_dir}"
        git repack -a -d -f --depth=1 --window=1
        # Copy to non-root user if one is specified
        if [ "${USERNAME}" != "root" ]; then
            cp -rf "${user_rc_file}" "${oh_my_install_dir}" /root
            chown -R ${USERNAME}:${group_name} "${user_rc_path}"
        fi
    fi
fi

# Persist image metadata info and a reporting script if meta.env is found in the same directory
meta_info_script="$(cat << 'EOF'
#!/bin/sh
. /usr/local/etc/vscode-dev-containers/meta.env

# Minimal output
if [ "$1" = "version" ] || [ "$1" = "image-version" ]; then
    echo "${VERSION}"
    exit 0
elif [ "$1" = "release" ]; then
    echo "${GIT_REPOSITORY_RELEASE}"
    exit 0
elif [ "$1" = "content" ] || [ "$1" = "content-url" ] || [ "$1" = "contents" ] || [ "$1" = "contents-url" ]; then
    echo "${CONTENTS_URL}"
    exit 0
fi

# Full output
echo
echo "Development container image information"
echo
if [ ! -z "${VERSION}" ]; then echo "- Image version: ${VERSION}"; fi
if [ ! -z "${DEFINITION_ID}" ]; then echo "- Definition ID: ${DEFINITION_ID}"; fi
if [ ! -z "${VARIANT}" ]; then echo "- Variant: ${VARIANT}"; fi
if [ ! -z "${GIT_REPOSITORY}" ]; then echo "- Source code repository: ${GIT_REPOSITORY}"; fi
if [ ! -z "${GIT_REPOSITORY_RELEASE}" ]; then echo "- Source code release/branch: ${GIT_REPOSITORY_RELEASE}"; fi
if [ ! -z "${BUILD_TIMESTAMP}" ]; then echo "- Timestamp: ${BUILD_TIMESTAMP}"; fi
if [ ! -z "${CONTENTS_URL}" ]; then echo && echo "More info: ${CONTENTS_URL}"; fi
echo
EOF
)"
if [ -f "${SCRIPT_DIR}/meta.env" ]; then
    mkdir -p /usr/local/etc/vscode-dev-containers/
    cp -f "${SCRIPT_DIR}/meta.env" /usr/local/etc/vscode-dev-containers/meta.env
    echo "${meta_info_script}" > /usr/local/bin/devcontainer-info
    chmod +x /usr/local/bin/devcontainer-info
fi

# Write marker file
mkdir -p "$(dirname "${MARKER_FILE}")"
echo -e "\
    PACKAGES_ALREADY_INSTALLED=${PACKAGES_ALREADY_INSTALLED}\n\
    LOCALE_ALREADY_SET=${LOCALE_ALREADY_SET}\n\
    EXISTING_NON_ROOT_USER=${EXISTING_NON_ROOT_USER}\n\
    RC_SNIPPET_ALREADY_ADDED=${RC_SNIPPET_ALREADY_ADDED}\n\
    ZSH_ALREADY_INSTALLED=${ZSH_ALREADY_INSTALLED}" > "${MARKER_FILE}"

echo "Done!"
309  .devcontainer/library-scripts/docker-debian.sh  Normal file
@@ -0,0 +1,309 @@
#!/usr/bin/env bash
#-------------------------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
#-------------------------------------------------------------------------------------------------------------
#
# Docs: https://github.com/microsoft/vscode-dev-containers/blob/main/script-library/docs/docker.md
# Maintainer: The VS Code and Codespaces Teams
#
# Syntax: ./docker-debian.sh [enable non-root docker socket access flag] [source socket] [target socket] [non-root user] [use moby] [CLI version]

ENABLE_NONROOT_DOCKER=${1:-"true"}
SOURCE_SOCKET=${2:-"/var/run/docker-host.sock"}
TARGET_SOCKET=${3:-"/var/run/docker.sock"}
USERNAME=${4:-"automatic"}
USE_MOBY=${5:-"true"}
DOCKER_VERSION=${6:-"latest"}
MICROSOFT_GPG_KEYS_URI="https://packages.microsoft.com/keys/microsoft.asc"
DOCKER_DASH_COMPOSE_VERSION="1"

set -e

if [ "$(id -u)" -ne 0 ]; then
    echo -e 'Script must be run as root. Use sudo, su, or add "USER root" to your Dockerfile before running this script.'
    exit 1
fi

# Determine the appropriate non-root user
if [ "${USERNAME}" = "auto" ] || [ "${USERNAME}" = "automatic" ]; then
    USERNAME=""
    POSSIBLE_USERS=("vscode" "node" "codespace" "$(awk -v val=1000 -F ":" '$3==val{print $1}' /etc/passwd)")
    for CURRENT_USER in ${POSSIBLE_USERS[@]}; do
        if id -u ${CURRENT_USER} > /dev/null 2>&1; then
            USERNAME=${CURRENT_USER}
            break
        fi
    done
    if [ "${USERNAME}" = "" ]; then
        USERNAME=root
    fi
elif [ "${USERNAME}" = "none" ] || ! id -u ${USERNAME} > /dev/null 2>&1; then
    USERNAME=root
fi

# Get central common setting
get_common_setting() {
    if [ "${common_settings_file_loaded}" != "true" ]; then
        curl -sfL "https://aka.ms/vscode-dev-containers/script-library/settings.env" 2>/dev/null -o /tmp/vsdc-settings.env || echo "Could not download settings file. Skipping."
        common_settings_file_loaded=true
    fi
    if [ -f "/tmp/vsdc-settings.env" ]; then
        local multi_line=""
        if [ "$2" = "true" ]; then multi_line="-z"; fi
        local result="$(grep ${multi_line} -oP "$1=\"?\K[^\"]+" /tmp/vsdc-settings.env | tr -d '\0')"
        if [ ! -z "${result}" ]; then declare -g $1="${result}"; fi
    fi
    echo "$1=${!1}"
}

# Function to run apt-get update if needed
apt_get_update_if_needed()
{
    if [ ! -d "/var/lib/apt/lists" ] || [ "$(ls /var/lib/apt/lists/ | wc -l)" = "0" ]; then
        echo "Running apt-get update..."
        apt-get update
    else
        echo "Skipping apt-get update."
    fi
}

# Checks if packages are installed and installs them if not
check_packages() {
    if ! dpkg -s "$@" > /dev/null 2>&1; then
        apt_get_update_if_needed
        apt-get -y install --no-install-recommends "$@"
    fi
}

# Figure out the correct version if a full three-part version number is not passed
find_version_from_git_tags() {
    local variable_name=$1
    local requested_version=${!variable_name}
    if [ "${requested_version}" = "none" ]; then return; fi
    local repository=$2
    local prefix=${3:-"tags/v"}
    local separator=${4:-"."}
    local last_part_optional=${5:-"false"}
    if [ "$(echo "${requested_version}" | grep -o "." | wc -l)" != "2" ]; then
        local escaped_separator=${separator//./\\.}
        local last_part
        if [ "${last_part_optional}" = "true" ]; then
            last_part="(${escaped_separator}[0-9]+)?"
        else
            last_part="${escaped_separator}[0-9]+"
        fi
        local regex="${prefix}\\K[0-9]+${escaped_separator}[0-9]+${last_part}$"
        local version_list="$(git ls-remote --tags ${repository} | grep -oP "${regex}" | tr -d ' ' | tr "${separator}" "." | sort -rV)"
        if [ "${requested_version}" = "latest" ] || [ "${requested_version}" = "current" ] || [ "${requested_version}" = "lts" ]; then
            declare -g ${variable_name}="$(echo "${version_list}" | head -n 1)"
        else
            set +e
            declare -g ${variable_name}="$(echo "${version_list}" | grep -E -m 1 "^${requested_version//./\\.}([\\.\\s]|$)")"
            set -e
        fi
    fi
    if [ -z "${!variable_name}" ] || ! echo "${version_list}" | grep "^${!variable_name//./\\.}$" > /dev/null 2>&1; then
        echo -e "Invalid ${variable_name} value: ${requested_version}\nValid values:\n${version_list}" >&2
        exit 1
    fi
    echo "${variable_name}=${!variable_name}"
}

# Ensure apt is in non-interactive mode to avoid prompts
export DEBIAN_FRONTEND=noninteractive

# Install dependencies
check_packages apt-transport-https curl ca-certificates gnupg2 dirmngr
if ! type git > /dev/null 2>&1; then
    apt_get_update_if_needed
    apt-get -y install git
fi

# Source /etc/os-release to get OS info
. /etc/os-release
# Fetch host/container arch.
architecture="$(dpkg --print-architecture)"

# Set up the necessary apt repos (either Microsoft's or Docker's)
if [ "${USE_MOBY}" = "true" ]; then

    cli_package_name="moby-cli"

    # Import key safely and import Microsoft apt repo
    get_common_setting MICROSOFT_GPG_KEYS_URI
    curl -sSL ${MICROSOFT_GPG_KEYS_URI} | gpg --dearmor > /usr/share/keyrings/microsoft-archive-keyring.gpg
    echo "deb [arch=${architecture} signed-by=/usr/share/keyrings/microsoft-archive-keyring.gpg] https://packages.microsoft.com/repos/microsoft-${ID}-${VERSION_CODENAME}-prod ${VERSION_CODENAME} main" > /etc/apt/sources.list.d/microsoft.list
else
    # Name of proprietary engine package
    cli_package_name="docker-ce-cli"

    # Import key safely and import Docker apt repo
    curl -fsSL https://download.docker.com/linux/${ID}/gpg | gpg --dearmor > /usr/share/keyrings/docker-archive-keyring.gpg
    echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/${ID} ${VERSION_CODENAME} stable" > /etc/apt/sources.list.d/docker.list
fi

# Refresh apt lists
apt-get update

# Soft version matching for CLI
if [ "${DOCKER_VERSION}" = "latest" ] || [ "${DOCKER_VERSION}" = "lts" ] || [ "${DOCKER_VERSION}" = "stable" ]; then
    # Empty, meaning grab whatever "latest" is in apt repo
    cli_version_suffix=""
else
    # Fetch a valid version from the apt-cache (eg: the Microsoft repo appends +azure, breakfix, etc...)
    docker_version_dot_escaped="${DOCKER_VERSION//./\\.}"
    docker_version_dot_plus_escaped="${docker_version_dot_escaped//+/\\+}"
    # Regex needs to handle debian package version number format: https://www.systutorials.com/docs/linux/man/5-deb-version/
    docker_version_regex="^(.+:)?${docker_version_dot_plus_escaped}([\\.\\+ ~:-]|$)"
    set +e # Don't exit if finding version fails - will handle gracefully
    cli_version_suffix="=$(apt-cache madison ${cli_package_name} | awk -F"|" '{print $2}' | sed -e 's/^[ \t]*//' | grep -E -m 1 "${docker_version_regex}")"
    set -e
    if [ -z "${cli_version_suffix}" ] || [ "${cli_version_suffix}" = "=" ]; then
        echo "(!) No full or partial Docker / Moby version match found for \"${DOCKER_VERSION}\" on OS ${ID} ${VERSION_CODENAME} (${architecture}). Available versions:"
        apt-cache madison ${cli_package_name} | awk -F"|" '{print $2}' | grep -oP '^(.+:)?\K.+'
        exit 1
    fi
    echo "cli_version_suffix ${cli_version_suffix}"
fi

# Install Docker / Moby CLI if not already installed
if type docker > /dev/null 2>&1; then
    echo "Docker / Moby CLI already installed."
else
    if [ "${USE_MOBY}" = "true" ]; then
        apt-get -y install --no-install-recommends moby-cli${cli_version_suffix} moby-buildx
        apt-get -y install --no-install-recommends moby-compose || echo "(*) Package moby-compose (Docker Compose v2) not available for OS ${ID} ${VERSION_CODENAME} (${architecture}). Skipping."
    else
        apt-get -y install --no-install-recommends docker-ce-cli${cli_version_suffix}
    fi
fi

# Install Docker Compose if not already installed and is on a supported architecture
if type docker-compose > /dev/null 2>&1; then
    echo "Docker Compose already installed."
else
    TARGET_COMPOSE_ARCH="$(uname -m)"
    if [ "${TARGET_COMPOSE_ARCH}" = "amd64" ]; then
        TARGET_COMPOSE_ARCH="x86_64"
    fi
    if [ "${TARGET_COMPOSE_ARCH}" != "x86_64" ]; then
        # Use pip to get a version that runs on this architecture
        if ! dpkg -s python3-minimal python3-pip libffi-dev python3-venv > /dev/null 2>&1; then
            apt_get_update_if_needed
            apt-get -y install python3-minimal python3-pip libffi-dev python3-venv
        fi
        export PIPX_HOME=/usr/local/pipx
        mkdir -p ${PIPX_HOME}
        export PIPX_BIN_DIR=/usr/local/bin
        export PYTHONUSERBASE=/tmp/pip-tmp
        export PIP_CACHE_DIR=/tmp/pip-tmp/cache
        pipx_bin=pipx
        if ! type pipx > /dev/null 2>&1; then
            pip3 install --disable-pip-version-check --no-cache-dir --user pipx
            pipx_bin=/tmp/pip-tmp/bin/pipx
        fi
        ${pipx_bin} install --pip-args '--no-cache-dir --force-reinstall' docker-compose
        rm -rf /tmp/pip-tmp
    else
        find_version_from_git_tags DOCKER_DASH_COMPOSE_VERSION "https://github.com/docker/compose" "tags/"
        echo "(*) Installing docker-compose ${DOCKER_DASH_COMPOSE_VERSION}..."
        curl -fsSL "https://github.com/docker/compose/releases/download/${DOCKER_DASH_COMPOSE_VERSION}/docker-compose-Linux-x86_64" -o /usr/local/bin/docker-compose
        chmod +x /usr/local/bin/docker-compose
    fi
fi

# If init file already exists, exit
if [ -f "/usr/local/share/docker-init.sh" ]; then
    exit 0
fi
echo "docker-init doesn't exist, adding..."

# By default, make the source and target sockets the same
if [ "${SOURCE_SOCKET}" != "${TARGET_SOCKET}" ]; then
    touch "${SOURCE_SOCKET}"
    ln -s "${SOURCE_SOCKET}" "${TARGET_SOCKET}"
fi

# Add a stub if not adding non-root user access, user is root
if [ "${ENABLE_NONROOT_DOCKER}" = "false" ] || [ "${USERNAME}" = "root" ]; then
    echo '/usr/bin/env bash -c "\$@"' > /usr/local/share/docker-init.sh
    chmod +x /usr/local/share/docker-init.sh
    exit 0
fi

# If enabling non-root access and specified user is found, setup socat and add script
chown -h "${USERNAME}":root "${TARGET_SOCKET}"
if ! dpkg -s socat > /dev/null 2>&1; then
    apt_get_update_if_needed
    apt-get -y install socat
fi
tee /usr/local/share/docker-init.sh > /dev/null \
<< EOF
#!/usr/bin/env bash
#-------------------------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See https://go.microsoft.com/fwlink/?linkid=2090316 for license information.
#-------------------------------------------------------------------------------------------------------------

set -e

SOCAT_PATH_BASE=/tmp/vscr-docker-from-docker
SOCAT_LOG=\${SOCAT_PATH_BASE}.log
SOCAT_PID=\${SOCAT_PATH_BASE}.pid

# Wrapper function to only use sudo if not already root
sudoIf()
{
    if [ "\$(id -u)" -ne 0 ]; then
        sudo "\$@"
    else
        "\$@"
    fi
}

# Log messages
log()
{
    echo -e "[\$(date)] \$@" | sudoIf tee -a \${SOCAT_LOG} > /dev/null
}

echo -e "\n** \$(date) **" | sudoIf tee -a \${SOCAT_LOG} > /dev/null
log "Ensuring ${USERNAME} has access to ${SOURCE_SOCKET} via ${TARGET_SOCKET}"

# If enabled, try to add a docker group with the right GID. If the group is root,
# fall back on using socat to forward the docker socket to another unix socket so
# that we can set permissions on it without affecting the host.
if [ "${ENABLE_NONROOT_DOCKER}" = "true" ] && [ "${SOURCE_SOCKET}" != "${TARGET_SOCKET}" ] && [ "${USERNAME}" != "root" ] && [ "${USERNAME}" != "0" ]; then
    SOCKET_GID=\$(stat -c '%g' ${SOURCE_SOCKET})
    if [ "\${SOCKET_GID}" != "0" ]; then
        log "Adding user to group with GID \${SOCKET_GID}."
        if [ "\$(cat /etc/group | grep :\${SOCKET_GID}:)" = "" ]; then
            sudoIf groupadd --gid \${SOCKET_GID} docker-host
        fi
        # Add user to group if not already in it
        if [ "\$(id ${USERNAME} | grep -E "groups.*(=|,)\${SOCKET_GID}\(")" = "" ]; then
            sudoIf usermod -aG \${SOCKET_GID} ${USERNAME}
        fi
    else
        # Enable proxy if not already running
        if [ ! -f "\${SOCAT_PID}" ] || ! ps -p \$(cat \${SOCAT_PID}) > /dev/null; then
            log "Enabling socket proxy."
            log "Proxying ${SOURCE_SOCKET} to ${TARGET_SOCKET} for vscode"
            sudoIf rm -rf ${TARGET_SOCKET}
            (sudoIf socat UNIX-LISTEN:${TARGET_SOCKET},fork,mode=660,user=${USERNAME} UNIX-CONNECT:${SOURCE_SOCKET} 2>&1 | sudoIf tee -a \${SOCAT_LOG} > /dev/null & echo "\$!" | sudoIf tee \${SOCAT_PID} > /dev/null)
        else
            log "Socket proxy already running."
        fi
    fi
    log "Success"
fi

# Execute whatever commands were passed in (if any). This allows us
# to set this script to ENTRYPOINT while still executing the default CMD.
set +e
exec "\$@"
EOF
chmod +x /usr/local/share/docker-init.sh
chown ${USERNAME}:root /usr/local/share/docker-init.sh
echo "Done!"
7  .devcontainer/rust-toolchain.toml  Normal file
@@ -0,0 +1,7 @@
[toolchain]
channel = "stable"
profile = "minimal"
components = ["clippy", "rustfmt"]
targets = [

]
17  .devcontainer/scripts/postCreateCommand.sh  Normal file
@@ -0,0 +1,17 @@
#!/usr/bin/env bash

set -e

# sudo apt-get update && export DEBIAN_FRONTEND=noninteractive \
#   && sudo apt-get -y install --no-install-recommends protobuf-compiler

curl -fsSL "https://github.com/protocolbuffers/protobuf/releases/download/v${PROTOBUF_VERSION}/protoc-${PROTOBUF_VERSION}-linux-x86_64.zip" -o /tmp/protoc-${PROTOBUF_VERSION}-linux-x86_64.zip
unzip /tmp/protoc-${PROTOBUF_VERSION}-linux-x86_64.zip -d /tmp/protoc-${PROTOBUF_VERSION}-linux-x86_64
sudo mv /tmp/protoc-${PROTOBUF_VERSION}-linux-x86_64/bin/protoc /usr/local/bin/protoc
sudo chmod +x /usr/local/bin/protoc
sudo mkdir -p /usr/local/include/protobuf
sudo mv /tmp/protoc-${PROTOBUF_VERSION}-linux-x86_64/include/* /usr/local/include/protobuf
rm -fR /tmp/protoc-${PROTOBUF_VERSION}-linux-x86_64 /tmp/protoc-${PROTOBUF_VERSION}-linux-x86_64.zip

npm install -g ts-protoc-gen
11  .gitignore  vendored  Normal file
@@ -0,0 +1,11 @@
.DS_Store
/build

# Added by cargo

/target
Cargo.lock

# Added by protobuf

include
75  .rustfmt.toml  Normal file
@@ -0,0 +1,75 @@
# https://rust-lang.github.io/rustfmt/?version=v1.4.38&search=
array_width = 60
attr_fn_like_width = 70
binop_separator = "Front" # "Front", "Back"
blank_lines_lower_bound = 0
blank_lines_upper_bound = 1
brace_style = "SameLineWhere" # "AlwaysNextLine", "PreferSameLine", "SameLineWhere"
chain_width = 60
color = "Auto" # "Auto", "Always", "Never"
combine_control_expr = true # true, false
comment_width = 80
condense_wildcard_suffixes = false # true, false
control_brace_style = "AlwaysSameLine" # "AlwaysNextLine", "AlwaysSameLine", "ClosingNextLine"
disable_all_formatting = false # true, false
edition = "2015" # "2015", "2018", "2021"
empty_item_single_line = true # true, false
enum_discrim_align_threshold = 0
error_on_line_overflow = false # true, false
error_on_unformatted = false # true, false
fn_args_layout = "Tall" # "Compressed", "Tall", "Vertical"
fn_call_width = 60
fn_single_line = false # true, false
force_explicit_abi = true # true, false
force_multiline_blocks = false # true, false
format_code_in_doc_comments = false # true, false
format_generated_files = false # true, false
format_macro_matchers = false # true, false
format_macro_bodies = true # true, false
format_strings = false # true, false
group_imports = "Preserve" # "Preserve", "StdExternalCrate"
hard_tabs = false # true, false
hex_literal_case = "Preserve" # "Upper", "Lower"
hide_parse_errors = false # true, false
ignore = []
imports_indent = "Block" # "Block", "Visual"
imports_layout = "Mixed" # "Horizontal", "HorizontalVertical", "Mixed", "Vertical"
indent_style = "Block" # "Block", "Visual"
inline_attribute_width = 0
license_template_path = ""
match_arm_blocks = true # true, false
match_arm_leading_pipes = "Never" # "Always", "Never", "Preserve"
match_block_trailing_comma = false # true, false
max_width = 100
merge_derives = true # true, false
imports_granularity = "Preserve" # "Preserve", "Crate", "Module", "Item", "One"
merge_imports = false # true, false
newline_style = "Auto" # "Auto", "Native", "Unix", "Windows"
normalize_comments = false # true, false
normalize_doc_attributes = false # true, false
overflow_delimited_expr = false # true, false
remove_nested_parens = true # true, false
reorder_impl_items = false # true, false
reorder_imports = true # true, false
reorder_modules = true # true, false
report_fixme = "Never" # "Always", "Unnumbered", "Never"
report_todo = "Never" # "Always", "Unnumbered", "Never"
skip_children = false # true, false
single_line_if_else_max_width = 50
space_after_colon = true # true, false
space_before_colon = false # true, false
spaces_around_ranges = false # true, false
struct_field_align_threshold = 0
struct_lit_single_line = true # true, false
struct_lit_width = 18
struct_variant_width = 35
tab_spaces = 2
trailing_comma = "Vertical" # "Always", "Never", "Vertical"
trailing_semicolon = true # true, false
type_punctuation_density = "Wide" # "Compressed", "Wide"
unstable_features = false # true, false
use_field_init_shorthand = false # true, false
use_small_heuristics = "Default" # "Default", "Off", "Max"
use_try_shorthand = false # true, false
where_single_line = false # true, false
wrap_comments = false # true, false
18  Cargo.toml  Normal file
@@ -0,0 +1,18 @@
[package]
name = "beteran-protobuf"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[lib]
name = "beteran_protobuf"
path = "./src/lib.rs"

[dependencies]
prost = "0.10"

[build-dependencies]
prost-build = "0.10"
once_cell = "1"
122  build.rs  Normal file
@@ -0,0 +1,122 @@
//! client client
//! NATS(c2se) NATS(c2se)
//! <---> <--->
//! client broker
//! <---> <--->
//! NATS(se2c) NATS(se2c)
//! server edge server edge
//! NATS(se) NATS(se)
//! <---> <--->
//! server service server broker server service
//! NATS(ss) <---> <---> NATS(ss)
//!

use once_cell::sync::Lazy;
use std::{collections::HashMap, fs, path::Path, process::Command};

static PROTO_PATHS: Lazy<Vec<&str>> =
  Lazy::new(|| vec!["/usr/local/include/protobuf/", "./proto/"]);

static TARGETS: Lazy<HashMap<&str, Vec<&str>>> = Lazy::new(|| {
  println!("initializing");
  let mut m = HashMap::new();

  m.insert(
    "protobuf",
    vec![
      "./proto/protobuf/rpc/error.proto",
      "./proto/protobuf/pagination/pagination.proto",
      "./proto/protobuf/pagination/search.proto",
      "./proto/protobuf/pagination/sort.proto",
    ],
  );

  m.insert(
    "models",
    vec![
      "./proto/models/member/member_class.proto",
      "./proto/models/member/member_level.proto",
      "./proto/models/member/member_site.proto",
      "./proto/models/member/member.proto",
    ],
  );

  m.insert(
    "c2se",
    vec![
      "./proto/c2se/backend/identity.proto",
      "./proto/c2se/backend/member.proto",
      "./proto/c2se/frontend/identity.proto",
    ],
  );

  m
});

fn main() {
  let build_path = "./build";
  if Path::new(build_path).exists() {
    fs::remove_dir_all(build_path).expect("clean for library directory is failed");
  }
  fs::create_dir_all(build_path).expect("creating for library directory is failed");

  let build_path_rust = format!("{}/rust", build_path);
  if Path::new(&build_path_rust).exists() {
    fs::remove_dir_all(&build_path_rust).expect("clean for rust library directory is failed");
  }
  fs::create_dir_all(&build_path_rust).expect("creating for rust library directory is failed");

  let build_path_javascript = format!("{}/javascript", build_path);
  if Path::new(&build_path_javascript).exists() {
    fs::remove_dir_all(&build_path_javascript)
      .expect("clean for javascript library directory is failed");
  }
  fs::create_dir_all(&build_path_javascript)
    .expect("creating for javascript library directory is failed");

  let proto_paths: Vec<_> = PROTO_PATHS
    .iter()
    .map(|v| format!("--proto_path={}", *v))
    .collect();

  eprintln!("proto_paths: {:?}", proto_paths);

  for (key, protos) in TARGETS.iter() {
    let build_path_rust_sub = format!("{}/{}", &build_path_rust, key);
    fs::create_dir_all(&build_path_rust_sub)
      .expect("creating for rust library sub directory is failed");

    let mut prost_build = prost_build::Config::new();
    prost_build.out_dir(build_path_rust_sub);
    prost_build
      .compile_protos(protos.as_slice(), PROTO_PATHS.as_slice())
      .expect("generating library for rust is failed");

    let output = Command::new("protoc")
      .args(proto_paths.as_slice())
      .args(["--experimental_allow_proto3_optional"])
      .args(&[
        &format!(
          "--js_out=import_style=commonjs,binary:{}",
          &build_path_javascript
        ),
        &format!("--ts_out={}", &build_path_javascript),
      ])
      .args(protos.as_slice())
      .output()
      .expect("generating library for javascript is failed");
    eprintln!(
      "generating library for javascript status: {}",
      output.status.success()
    );

    eprintln!(
      "generating library for typescript stdout: {}",
      String::from_utf8_lossy(&output.stdout)
    );
    eprintln!(
      "generating library for typescript stderr: {}",
      String::from_utf8_lossy(&output.stderr)
    );
  }
}
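Note on consuming the build.rs output: the script writes the prost-generated Rust files into the checked-in ./build/rust/<target>/ directories instead of Cargo's OUT_DIR, and nothing in this commit includes those files into the crate yet. The following is a minimal sketch of what such an include could look like, assuming prost's default one-file-per-package naming (e.g. bet.protobuf.rpc.rs) and a module tree that mirrors the proto package hierarchy (which is what prost's cross-package references expect); only the shared packages are shown, and every path and module name below is an assumption, not part of this commit.

// Sketch only (hypothetical src/lib.rs content), assuming prost's default
// <proto package>.rs file naming under the out_dir set in build.rs.
pub mod bet {
  pub mod protobuf {
    pub mod rpc {
      include!("../build/rust/protobuf/bet.protobuf.rpc.rs");
    }
    pub mod pagination {
      include!("../build/rust/protobuf/bet.protobuf.pagination.rs");
    }
  }
  pub mod beteran {
    pub mod member {
      include!("../build/rust/models/bet.beteran.member.rs");
    }
  }
}

The more conventional prost setup would point out_dir at Cargo's OUT_DIR and use include!(concat!(env!("OUT_DIR"), "/bet.protobuf.rpc.rs")); with the checked-in ./build directory used here, the include paths have to be kept in sync with the TARGETS map by hand.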
17  proto/c2se/backend/identity.proto  Normal file
@@ -0,0 +1,17 @@
syntax = "proto3";

package bet.beteran.c2se.backend.identity;

import "protobuf/rpc/error.proto";

// subject = bet.beteran.c2se.backend.identity.Signin
message SigninRequest {
  string username = 1;
  string password = 2;
  string security_code = 3;
}

message SigninResponse {
  optional bet.protobuf.rpc.Error error = 1;
  optional string token = 2;
}
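The "subject = ..." comments describe a NATS request/reply convention: the request message is published on the named subject and the reply payload is the encoded response message. A hedged caller-side sketch, assuming the synchronous nats crate and the prost-generated types for this file; neither the nats dependency nor the module path is part of this commit.

// Hypothetical sketch of the subject convention above.
use beteran_protobuf::bet::beteran::c2se::backend::identity::{SigninRequest, SigninResponse};
use prost::Message;

fn signin(
  nc: &nats::Connection,
  username: &str,
  password: &str,
  security_code: &str,
) -> Result<SigninResponse, Box<dyn std::error::Error>> {
  let req = SigninRequest {
    username: username.to_owned(),
    password: password.to_owned(),
    security_code: security_code.to_owned(),
  };
  // Publish the encoded request on the subject named in the proto comment and
  // decode the reply payload as a SigninResponse.
  let reply = nc.request(
    "bet.beteran.c2se.backend.identity.Signin",
    req.encode_to_vec(),
  )?;
  Ok(SigninResponse::decode(reply.data.as_slice())?)
}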
43  proto/c2se/backend/member.proto  Normal file
@@ -0,0 +1,43 @@
syntax = "proto3";

package bet.beteran.c2se.backend.member;

import "protobuf/rpc/error.proto";
import "protobuf/pagination/pagination.proto";
import "protobuf/pagination/search.proto";
import "protobuf/pagination/sort.proto";

import "models/member/member.proto";

// subject = bet.beteran.c2se.backend.member.ListMembers
message ListMembersRequest {
  optional bet.protobuf.pagination.Pagination pagination = 1;
  repeated bet.protobuf.pagination.Search searches = 2;
  repeated bet.protobuf.pagination.Sort sorts = 3;
}

message ListMembersResponse {
  optional bet.protobuf.rpc.Error error = 1;
  repeated bet.beteran.member.Member members = 2;
}

// subject = bet.beteran.c2se.backend.member.GetMember
message GetMemberRequest {
  string id = 1;
}

message GetMemberResponse {
  optional bet.protobuf.rpc.Error error = 1;
  optional bet.beteran.member.Member member = 2;
}

// subject = bet.beteran.c2se.backend.member.GetMemberByUsername
message GetMemberByUsernameRequest {
  string username = 1;
}

message GetMemberByUsernameResponse {
  optional bet.protobuf.rpc.Error error = 1;
  optional bet.beteran.member.Member member = 2;
}
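ListMembersRequest composes the shared pagination, search, and sort messages defined under proto/protobuf/pagination/. A hedged Rust sketch of filling one in with prost-generated types; the module paths are assumptions about how the generated code ends up being exposed, not part of this commit.

// Illustrative only: first page of members, filtered by username fragment,
// newest first.
use beteran_protobuf::bet::beteran::c2se::backend::member::ListMembersRequest;
use beteran_protobuf::bet::protobuf::pagination::{Pagination, Search, Sort, SortOrder};

fn first_page_by_username(fragment: &str) -> ListMembersRequest {
  ListMembersRequest {
    pagination: Some(Pagination {
      page: Some(1),
      page_size: Some(20),
    }),
    searches: vec![Search {
      key: "username".to_owned(),
      value: fragment.to_owned(),
    }],
    sorts: vec![Sort {
      by: "created_at".to_owned(),
      // prost represents proto3 enum fields as i32.
      order: SortOrder::Desc as i32,
    }],
  }
}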
17  proto/c2se/frontend/identity.proto  Normal file
@@ -0,0 +1,17 @@
syntax = "proto3";

package bet.beteran.c2se.frontend.identity;

import "protobuf/rpc/error.proto";

// subject = bet.beteran.c2se.frontend.identity.Signin
message SigninRequest {
  string username = 1;
  string password = 2;
  string security_code = 3;
}

message SigninResponse {
  optional bet.protobuf.rpc.Error error = 1;
  optional string token = 2;
}
38  proto/models/member/member.proto  Normal file
@@ -0,0 +1,38 @@
syntax = "proto3";

package bet.beteran.member;

import "models/member/member_class.proto";
import "models/member/member_level.proto";
import "models/member/member_site.proto";

enum MemberState {
  NONE = 0;
  NORMAL = 1;
  PENDING = 2;
  WITHDRAWAL = 3;
  DORMANCY = 4;
  BLACKLIST = 5;
  SUSPENDED = 6;
}

message Member {
  string id = 1;
  string domain_id = 2;
  MemberClass member_class = 3;
  MemberLevel member_level = 4;
  MemberSite member_site = 5;
  optional Member referrer = 6;
  uint64 referred_count = 7;
  string username = 8;
  string nickname = 9;
  optional string mobile_phone_number = 10;
  MemberState state = 11;
  optional uint64 state_changed_at = 12;
  optional string last_signined_ip = 13;
  optional uint64 last_signined_at = 14;
  uint64 created_at = 15;
  uint64 updated_at = 16;
  optional uint64 deleted_at = 17;
}
12  proto/models/member/member_class.proto  Normal file
@@ -0,0 +1,12 @@
syntax = "proto3";

package bet.beteran.member;

message MemberClass {
  string id = 1;
  optional MemberClass parent = 2;
  string name = 3;
  uint64 created_at = 4;
  uint64 updated_at = 5;
  optional uint64 deleted_at = 6;
}
12  proto/models/member/member_level.proto  Normal file
@@ -0,0 +1,12 @@
syntax = "proto3";

package bet.beteran.member;

message MemberLevel {
  string id = 1;
  string name = 2;
  uint32 order = 3;
  uint64 created_at = 4;
  uint64 updated_at = 5;
  optional uint64 deleted_at = 6;
}
11  proto/models/member/member_site.proto  Normal file
@@ -0,0 +1,11 @@
syntax = "proto3";

package bet.beteran.member;

message MemberSite {
  string id = 1;
  string url = 2;
  uint64 created_at = 3;
  uint64 updated_at = 4;
  optional uint64 deleted_at = 5;
}
16  proto/protobuf/pagination/pagination.proto  Normal file
@@ -0,0 +1,16 @@
syntax = "proto3";

package bet.protobuf.pagination;

option csharp_namespace = "Bet.Protobuf.Pagination";
option cc_enable_arenas = true;
option java_package = "com.bet.protobuf.pagination";
option java_outer_classname = "PaginationProto";
option java_multiple_files = true;
option objc_class_prefix = "BetPagination";
option go_package = "bet.com/protobuf/pagination";

message Pagination {
  optional uint32 page = 1;
  optional uint32 page_size = 2;
}
16  proto/protobuf/pagination/search.proto  Normal file
@@ -0,0 +1,16 @@
syntax = "proto3";

package bet.protobuf.pagination;

option csharp_namespace = "Bet.Protobuf.Pagination";
option cc_enable_arenas = true;
option java_package = "com.bet.protobuf.pagination";
option java_outer_classname = "PaginationProto";
option java_multiple_files = true;
option objc_class_prefix = "BetPagination";
option go_package = "bet.com/protobuf/pagination";

message Search {
  string key = 1;
  string value = 2;
}
21  proto/protobuf/pagination/sort.proto  Normal file
@@ -0,0 +1,21 @@
syntax = "proto3";

package bet.protobuf.pagination;

option csharp_namespace = "Bet.Protobuf.Pagination";
option cc_enable_arenas = true;
option java_package = "com.bet.protobuf.pagination";
option java_outer_classname = "PaginationProto";
option java_multiple_files = true;
option objc_class_prefix = "BetPagination";
option go_package = "bet.com/protobuf/pagination";

enum SortOrder {
  ASC = 0;
  DESC = 1;
}

message Sort {
  string by = 1;
  SortOrder order = 2;
}
17  proto/protobuf/rpc/error.proto  Normal file
@@ -0,0 +1,17 @@
syntax = "proto3";

package bet.protobuf.rpc;

option csharp_namespace = "Bet.Protobuf.Rpc";
option cc_enable_arenas = true;
option java_package = "com.bet.protobuf.rpc";
option java_outer_classname = "RpcProto";
option java_multiple_files = true;
option objc_class_prefix = "BetRpc";
option go_package = "bet.com/protobuf/rpc";

message Error {
  int32 code = 1;
  optional string message = 2;
  optional bytes data = 3;
}
0  proto/se2c/backend/.gitkeep  Normal file
0  proto/se2c/frontend/.gitkeep  Normal file
0  proto/ss/.gitkeep  Normal file
5  src/lib.rs  Normal file
@@ -0,0 +1,5 @@
#[cfg(test)]
mod tests {
  #[test]
  fn it_works() {}
}
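The test module above is a placeholder. Once the generated modules are actually included into the crate (see the sketch after build.rs), a round-trip encode/decode test is a cheap way to exercise them; the following is a hedged sketch, and the crate::bet::protobuf::pagination path is the same hypothetical layout assumed earlier, not something defined in this commit.

// Sketch of a possible round-trip test over a generated message.
#[cfg(test)]
mod roundtrip_tests {
  use crate::bet::protobuf::pagination::Pagination; // hypothetical include path
  use prost::Message;

  #[test]
  fn pagination_roundtrip() {
    let original = Pagination { page: Some(3), page_size: Some(50) };
    let bytes = original.encode_to_vec();
    let decoded = Pagination::decode(bytes.as_slice()).expect("decode failed");
    // prost derives PartialEq on generated messages, so direct comparison works.
    assert_eq!(original, decoded);
  }
}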