Upgrade mysql2 gem to latest version with rails 4? - ruby-on-rails-4

I have tried to update the mysql2 gem, but I get this error:
Gem::LoadError: Specified 'mysql2' for database adapter, but the gem is not loaded. Add `gem 'mysql2'` to your Gemfile (and ensure its version is at the minimum required by ActiveRecord).
I have to upgrade the mysql2 gem to the latest version because I need to do a data migration from MySQL to PostgreSQL; that is why we need to upgrade the mysql2 gem.
I also need to dump the data of the MySQL database, which is another reason I need to upgrade the mysql2 gem to the latest version.
If anyone has a suggestion on how to get this done, please help me and mail: santu.essence@gmail.com
docker file:
FROM ruby:2.7.2
SHELL ["/bin/bash", "-c"]
RUN apt-get update -y --force-yes
RUN apt-get install libtag1-dev -y --force-yes
RUN apt-get install -y curl
RUN curl -sL https://deb.nodesource.com/setup_10.x | bash - \
&& apt-get install nodejs -y
# postgresql-client --- postgresql database adapter
# RUN apt-get install -y wget sudo pgloader
# install ffmpeg
# RUN apt-get install checkinstall
RUN apt-get update \
&& apt-get install -y git build-essential gcc make yasm autoconf automake cmake libtool libmp3lame-dev pkg-config libunwind-dev zlib1g-dev libssl-dev \
&& apt-get update \
&& apt-get clean
RUN apt-get install -y --no-install-recommends libc6-dev libgdiplus wget software-properties-common \
&& wget https://www.ffmpeg.org/releases/ffmpeg-4.0.2.tar.gz \
&& tar -xzf ffmpeg-4.0.2.tar.gz; rm -r ffmpeg-4.0.2.tar.gz \
&& cd ./ffmpeg-4.0.2; ./configure --enable-gpl --enable-libmp3lame --enable-decoder=mjpeg,png --enable-encoder=png --enable-openssl --enable-nonfree \
&& cd ./ffmpeg-4.0.2; make \
&& cd ./ffmpeg-4.0.2; make install \
# ffmpeg installation complete
&& rm -rf /var/lib/apt/lists/*
WORKDIR /usr/src/app
# RUN mkdir -p .bundle && printf '%s\n%s\n' '---' 'BUNDLE_WITHOUT: development:test' > .bundle/config
COPY Gemfile* ./
RUN gem install bundler -v 2.2.28
RUN bundle install
COPY . .
# ENTRYPOINT ./entrypoint-web.sh
docker-compose.yml:
version: '3'
services:
db:
image: mysql:5.6.45
restart: always
environment:
MYSQL_DATABASE: youspin_development
MYSQL_USER: root
MYSQL_PASSWORD: root
MYSQL_ROOT_PASSWORD: root
volumes:
- ./tmp/db:/var/lib/mysql
ports:
- 3306:3306
redis:
image: redis
ports:
- 6379:6379
# volumes:
# ./tmp/db:
# external: true
worker:
# set the path for Dockerfile
build: .
command: bash -c "rm -f /usr/src/app/tmp/pids/resque.pid && rake app:worker-start "
environment:
RAILS_ENV: development
REDIS_URL: redis://redis:6379/0
volumes:
- .:/usr/src/app
depends_on:
- db
- redis
web:
# set the path for Dockerfile
build: .
command: bash -c "rm -f /usr/src/app/tmp/pids/server.pid && rake assets:clobber app:init app:serve && bundle exec rails s -p 3000 -b '0.0.0.0'"
environment:
RAILS_ENV: development
REDIS_URL: redis://redis:6379/0
volumes:
- .:/usr/src/app
ports:
- 3000:3000
depends_on:
- db
- worker
- redis
nginx:
image: nginx
volumes:
- ./config/nginx.dev.conf:/etc/nginx/conf.d/default.conf
- ./public:/var/www/youspin/public
ports:
- 80:80
environment:
- NGINX_PORT=80
links:
- web

Related

xmlrpc.py Connection refused error while using supervisor in docker

Hello, I am writing a Dockerfile and a Compose file with Ubuntu 20.04 and trying to install supervisor inside it.
docker file :
...
FROM ubuntu:20.04
WORKDIR /src/app
ENV BACKENDENVIRONMENT 0
COPY gsigner .
COPY docker_requirements.txt ./
ARG DEBIAN_FRONTEND=noninteractive
RUN apt-get update
RUN apt install -y python-is-python3
RUN apt-get install -y python3.9
RUN apt-get install python3-pip -y
RUN apt-get install gcc musl-dev python3-dev libffi-dev openssl libssl-dev cargo -y
RUN apt install -y postgresql postgresql-contrib
RUN apt-get update && apt-get install -y postgresql-server-dev-all gcc python3-dev musl-dev
RUN pip install --upgrade pip setuptools wheel \
&&pip install -r docker_requirements.txt
RUN mkdir /run/gsigner
RUN apt-get install -y supervisor
COPY backend_supervisord.conf /etc/supervisor/conf.d/
dockerfile updated
docker compose :
version: "3.9"
services:
gsigner:
build: .
command: bash -c "python manage.py migrate && supervisorctl reread && supervisorctl reload&&supervisorctl start daphne"
ports:
- 8000:8000
volumes:
- static:/var/static/gsigner/
- media:/var/media/gsigner/
- gsigner:/src/app/
- log:/var/log/gsigner/
volumes:
static:
media:
log:
gsigner:
dockercompose updated
daphne is my program name in my supervisor conf file
my supervisor conf file :
[supervisord]
[supervisorctl]
[program:daphne]
command=daphne gsigner.asgi:application
directory=/src/app/gsigner/
user=root
autostart=true
autorestart=true
I really do not understand what is happening here,
and this is the err msg :
error:error: <class 'ConnectionRefusedError'>, [Errno 111] Connection refused: file: /usr/lib/python3/dist-packages/supervisor/xmlrpc.py line: 560

docker-compose.yml for production - Django and Celery

I'm looking to deploy a simple application which uses Django and celery.
docker-compose.yml:
version: "3.8"
services:
django:
build: .
container_name: django
command: python manage.py runserver 0.0.0.0:8000
volumes:
- .:/usr/src/app/
ports:
- "8000:8000"
environment:
- DEBUG=1
- CELERY_BROKER=redis://redis:6379/0
- CELERY_BACKEND=djcelery.backends.database:DatabaseBackend
depends_on:
- redis
celery:
build: .
command: celery -A core worker -l INFO
volumes:
- .:/usr/src/app
environment:
- DEBUG=1
- CELERY_BROKER=redis://redis:6379/0
- CELERY_BACKEND=djcelery.backends.database:DatabaseBackend
depends_on:
- django
- redis
redis:
image: "redis:alpine"
volumes:
pgdata:
Dockerfile:
FROM python:3.7
WORKDIR /app
ADD . /app
#Install dependencies for PyODBC
RUN apt-get update \
&& apt-get install unixodbc -y \
&& apt-get install unixodbc-dev -y \
&& apt-get install tdsodbc -y \
&& apt-get clean -y
# install ODBC driver in docker image
RUN apt-get update \
&& curl https://packages.microsoft.com/keys/microsoft.asc | apt-key add - \
&& curl https://packages.microsoft.com/config/debian/10/prod.list > /etc/apt/sources.list.d/mssql-release.list \
&& apt-get update \
&& ACCEPT_EULA=Y apt-get install --yes --no-install-recommends msodbcsql17 \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/* \
&& rm -rf /tmp/*
# install requirements
RUN pip install --trusted-host pypi.python.org -r requirements.txt
EXPOSE 5000
ENV NAME OpentoAll
CMD ["python", "app.py"]
Project Directories:
When I run "docker-compose up" locally, the celery worker is run and I am able to go to localhost:8000 to access the API to make asynchronous requests to a celery task.
Now I'm wondering how can I deploy this to the cloud environment? What would be the image I would need to build and deploy? Thanks
You will need to install an application server (eg. gunicorn) in your django container and then run it on say port 8000. You'll also need a webserver (eg. nginx) in a container or installed on the host. The web server will need to act as a reverse proxy for gunicorn and also serve your static Django content.

Azure Docker Django sqlite3 not deploying

I have spent an entire day trying to deploy a simple Django app to Azure with a docker container following this links advice.
Dockerfile:
# My Site
# Version: 1.0
FROM python:3.7.2
# Install Python and Package Libraries
RUN apt-get update && apt-get upgrade -y && apt-get autoremove && apt-get autoclean
RUN apt-get install -y \
libffi-dev \
libssl-dev \
default-libmysqlclient-dev \
libxml2-dev \
libxslt-dev \
libjpeg-dev \
libfreetype6-dev \
zlib1g-dev \
net-tools \
vim
# Project Files and Settings
RUN mkdir -p myproject
WORKDIR /myproject
COPY requirements.txt manage.py . ./
RUN pip install -r requirements.txt
# Server
EXPOSE 8000
STOPSIGNAL SIGINT
ENTRYPOINT ["python", "manage.py"]
CMD ["runserver", "0.0.0.0:8000"]
docker-compose.yml
version: "2"
services:
django:
container_name: django_server
build:
context: .
dockerfile: Dockerfile
image: johnmweisz/education_app:latest
stdin_open: true
tty: true
volumes:
- .:/myproject
ports:
- "8000:8000"
using docker-compose build/run locally works perfectly fine but when deploying the app from https://cloud.docker.com/repository/docker/johnmweisz/education_app
to Azure it will not start and says that it cannot find manage.py.
I keep going in circles trying to find instructions that work. Anyone with advice please help.

How to attach graph-tool to Django using Docker

I need to use some graph-tool calculations in my Django project. So I started with docker pull tiagopeixoto/graph-tool and then added it to my Docker-compose file:
version: '3'
services:
db:
image: postgres
graph-tool:
image: dcagatay/graph-tool
web:
build: .
command: python3 manage.py runserver 0.0.0.0:8000
volumes:
- .:/code
ports:
- "8000:8000"
depends_on:
- db
- graph-tool
When I up my docker-compose I got a line:
project_graph-tool_1_87e2d144b651 exited with code 0
And finally when my Django projects starts I can not import modules from graph-tool, like:
from graph_tool.all import *
If I try work directly in this docker image using:
docker run -it -u user -w /home/user tiagopeixoto/graph-tool ipython
everything goes fine.
What am I doing wrong and how can I fix it and finally attach graph-tool to Django? Thanks!
Rather than using a seperate docker image for graphtool, i think its better to use it within the same Dockerfile which you are using for Django. For example, update your current Dockerfile:
FROM ubuntu:16.04 # using ubuntu image
ENV PYTHONUNBUFFERED 1
ENV C_FORCE_ROOT true
# python3-graph-tool specific requirements for installation in Ubuntu from documentation
RUN echo "deb http://downloads.skewed.de/apt/xenial xenial universe" >> /etc/apt/sources.list && \
echo "deb-src http://downloads.skewed.de/apt/xenial xenial universe" >> /etc/apt/sources.list
RUN apt-key adv --keyserver pgp.skewed.de --recv-key 612DEFB798507F25
# Install dependencies
RUN apt-get update \
&& apt-get install -y python3-pip python3-dev \
&& apt-get install --yes --no-install-recommends --allow-unauthenticated python3-graph-tool \
&& cd /usr/local/bin \
&& ln -s /usr/bin/python3 python \
&& pip3 install --upgrade pip
# Project specific setups
# These steps might be different in your project
RUN mkdir /code
WORKDIR /code
ADD . /code
RUN pip3 install -r requirements.pip
Now update your docker-compose file as well:
version: '3'
services:
db:
image: postgres
web:
build: .
container_name: djcon # <-- preferred over generated name
command: python3 manage.py runserver 0.0.0.0:8000
volumes:
- .:/code
ports:
- "8000:8000"
depends_on:
- db
Thats it. Now if you go to your web service's shell by docker exec -ti djcon bash(or any generated name instead of djcon), and access the django shell like this python manage.py shell. Then type from graph_tool.all import * and it will not throw any import error.

How to deploy docker image created with version 2 on the aws

I am new to docker. I did somehow create docker project with version 2 docker compose following is my docker-compose.yml
version: "2"
services:
# Configuration for php web server
webserver:
image: inshastri/laravel-adminpanel:latest
restart: always
ports:
- '8080:80'
networks:
- web
volumes:
- ./:/var/www/html
- ./apache.conf:/etc/apache2/sites-available/000-default.conf
depends_on:
- db
links:
- db
# - redis
environment:
DB_HOST: db
DB_DATABASE: phpapp
DB_USERNAME: root
DB_PASSWORD: toor
# Configuration for mysql db server
db:
image: "mysql:5"
volumes:
- ./mysql:/etc/mysql/conf.d
environment:
MYSQL_ROOT_PASSWORD: toor
MYSQL_DATABASE: phpapp
networks:
- web
restart: always
# Configuration for phpmyadmin (optional)
phpmyadmin:
image: phpmyadmin/phpmyadmin
environment:
PMA_PORT: 3306
PMA_HOST: db
PMA_USER: root
PMA_PASSWORD: toor
ports:
- "8004:80"
restart: always
depends_on:
- db
networks:
- web
redis:
image: redis:4.0-alpine
# Network connecting the whole app
networks:
web:
driver: bridge
and with docker file as below
FROM ubuntu:16.04
RUN apt-get update \
&& apt-get install -qy language-pack-en-base \
&& locale-gen en_US.UTF-8
ENV LANG en_US.UTF-8
ENV LC_ALL en_US.UTF-8
RUN apt-get -y install apache2
RUN a2enmod headers
RUN a2enmod rewrite
# add PPA for PHP 7
RUN apt-get install -y --no-install-recommends apt-utils
RUN apt-get install -y software-properties-common python-software-properties
RUN add-apt-repository -y ppa:ondrej/php
# Adding php 7
RUN apt-get update
RUN apt-get install -y php7.1 php7.1-fpm php7.1-cli php7.1-common php7.1-mbstring php7.1-gd php7.1-intl php7.1-xml php7.1-mysql php7.1-mcrypt php7.1-zip
RUN apt-get -y install libapache2-mod-php7.1 php7.1 php7.1-cli php-xdebug php7.1-mbstring sqlite3 php7.1-mysql php-imagick php-memcached php-pear curl imagemagick php7.1-dev php7.1-phpdbg php7.1-gd npm nodejs-legacy php7.1-json php7.1-curl php7.1-sqlite3 php7.1-intl apache2 vim git-core wget libsasl2-dev libssl-dev
RUN apt-get -y install libsslcommon2-dev libcurl4-openssl-dev autoconf g++ make openssl libssl-dev libcurl4-openssl-dev pkg-config libsasl2-dev libpcre3-dev
RUN apt-get install -y imagemagick graphicsmagick
RUN a2enmod headers
RUN a2enmod rewrite
ENV APACHE_RUN_USER www-data
ENV APACHE_RUN_GROUP www-data
ENV APACHE_LOG_DIR /var/log/apache2
ENV APACHE_PID_FILE /var/run/apache2.pid
ENV APACHE_RUN_DIR /var/run/apache2
ENV APACHE_LOCK_DIR /var/lock/apache2
RUN ln -sf /dev/stdout /var/log/apache2/access.log && \
ln -sf /dev/stderr /var/log/apache2/error.log
RUN mkdir -p $APACHE_RUN_DIR $APACHE_LOCK_DIR $APACHE_LOG_DIR
# Update application repository list and install the Redis server.
RUN apt-get update && apt-get install -y redis-server
# Allow Composer to be run as root
ENV COMPOSER_ALLOW_SUPERUSER 1
# Setup the Composer installer
RUN curl -o /tmp/composer-setup.php https://getcomposer.org/installer \
&& curl -o /tmp/composer-setup.sig https://composer.github.io/installer.sig \
&& php -r "if (hash('SHA384', file_get_contents('/tmp/composer-setup.php')) !== trim(file_get_contents('/tmp/composer-setup.sig'))) { unlink('/tmp/composer-setup.php'); echo 'Invalid installer' . PHP_EOL; exit(1); }" \
&& php /tmp/composer-setup.php \
&& chmod a+x composer.phar \
&& mv composer.phar /usr/local/bin/composer
# Install composer dependencies
RUN echo pwd: `pwd` && echo ls: `ls`
# RUN composer install
EXPOSE 80
# Expose default port
EXPOSE 6379
VOLUME [ "/var/www/html" ,"./mysql:/etc/mysql/conf.d",]
WORKDIR /var/www/html
ENTRYPOINT [ "/usr/sbin/apache2" ]
CMD ["-D", "FOREGROUND"]
COPY . /var/www/html
COPY ./apache.conf /etc/apache2/sites-available/000-default.conf
Now there are 2 things which i cannot understand after googling a lot
1) When I give the image to my friend, he pulls it, but when he runs it, it comes up without the other services like MySQL and phpMyAdmin.
2) how should i deploy this application to ec2 amazon
There are lots of things but cannot understand any of them like ec2 beanstalk etc
Please guide me through a simple way to upload my image file to AWS and run it there. Also, how can I run my image on my friend's PC? I thought Docker was a container management system, so it should bring up all my services when my friend (or anyone) pulls my image.
For reference, my image is inshastri/laravel-adminpanel.
Please help thanks in advance