WSO2 ESB 4.6.0 giving errors on a particular local machine - wso2

I am using WSO2 ESB 4.6.0 and WSO2 DSS 3.0.1, and both are giving errors like "Port already in use". I fixed that using the kill command. Now it is giving database errors like:
INFO - AgentDS Successfully deployed Agent Client
[2013-10-28 10:43:30,001] ERROR - DatabaseUtil Database Error - General error: "java.lang.RuntimeException: page[325] data leaf table:0 entries:16 parent:144 keys:[506, 508, 510, 512, 514, 516, 522, 528, 530, 532, 534, 545, 547, 549, 551, 553] offsets:[1936, 1825, 1713, 1601, 1488, 1374, 1262, 1150, 1038, 926, 815, 702, 589, 478, 366, 253] parent 144 expected 28" [50000-140]
org.h2.jdbc.JdbcSQLException: General error: "java.lang.RuntimeException: page[325] data leaf table:0 entries:16 parent:144 keys:[506, 508, 510, 512, 514, 516, 522, 528, 530, 532, 534, 545, 547, 549, 551, 553] offsets:[1936, 1825, 1713, 1601, 1488, 1374, 1262, 1150, 1038, 926, 815, 702, 589, 478, 366, 253] parent 144 expected 28" [50000-140]
at org.h2.message.DbException.getJdbcSQLException(DbException.java:327)
at org.h2.message.DbException.get(DbException.java:156)
at org.h2.message.DbException.convert(DbException.java:279)
at org.h2.engine.Database.openDatabase(Database.java:237)
at org.h2.engine.Database.<init>(Database.java:202)
at org.h2.engine.Engine.openSession(Engine.java:56)
at org.h2.engine.Engine.openSession(Engine.java:146)
at org.h2.engine.Engine.getSession(Engine.java:125)
at org.h2.engine.Session.createSession(Session.java:122)
at org.h2.engine.SessionRemote.connectEmbeddedOrServer(SessionRemote.java:241)
at org.h2.engine.SessionRemote.createSession(SessionRemote.java:219)
at org.h2.jdbc.JdbcConnection.<init>(JdbcConnection.java:111)
at org.h2.jdbc.JdbcConnection.<init>(JdbcConnection.java:95)
at org.h2.Driver.connect(Driver.java:73)
at org.apache.tomcat.jdbc.pool.PooledConnection.connectUsingDriver(PooledConnection.java:277)
at org.apache.tomcat.jdbc.pool.PooledConnection.connect(PooledConnection.java:182)
at org.apache.tomcat.jdbc.pool.ConnectionPool.createConnection(ConnectionPool.java:694)
at org.apache.tomcat.jdbc.pool.ConnectionPool.borrowConnection(ConnectionPool.java:626)
at org.apache.tomcat.jdbc.pool.ConnectionPool.getConnection(ConnectionPool.java:182)
at org.apache.tomcat.jdbc.pool.DataSourceProxy.getConnection(DataSourceProxy.java:127)
at org.wso2.carbon.user.core.claim.dao.ClaimDAO.getDialectCount(ClaimDAO.java:159)
at org.wso2.carbon.user.core.common.DefaultRealm.populateProfileAndClaimMaps(DefaultRealm.java:309)
at org.wso2.carbon.user.core.common.DefaultRealm.init(DefaultRealm.java:109)
at org.wso2.carbon.user.core.common.DefaultRealmService.initializeRealm(DefaultRealmService.java:218)
at org.wso2.carbon.user.core.common.DefaultRealmService.<init>(DefaultRealmService.java:103)
at org.wso2.carbon.user.core.common.DefaultRealmService.<init>(DefaultRealmService.java:116)
at org.wso2.carbon.user.core.internal.Activator.startDeploy(Activator.java:67)
at org.wso2.carbon.user.core.internal.BundleCheckActivator.start(BundleCheckActivator.java:61)
at org.eclipse.osgi.framework.internal.core.BundleContextImpl$1.run(BundleContextImpl.java:711)
at java.security.AccessController.doPrivileged(Native Method)
at org.eclipse.osgi.framework.internal.core.BundleContextImpl.startActivator(BundleContextImpl.java:702)
at org.eclipse.osgi.framework.internal.core.BundleContextImpl.start(BundleContextImpl.java:683)
at org.eclipse.osgi.framework.internal.core.BundleHost.startWorker(BundleHost.java:381)
at org.eclipse.osgi.framework.internal.core.AbstractBundle.resume(AbstractBundle.java:389)
at org.eclipse.osgi.framework.internal.core.Framework.resumeBundle(Framework.java:1130)
at org.eclipse.osgi.framework.internal.core.StartLevelManager.resumeBundles(StartLevelManager.java:559)
at org.eclipse.osgi.framework.internal.core.StartLevelManager.resumeBundles(StartLevelManager.java:544)
at org.eclipse.osgi.framework.internal.core.StartLevelManager.incFWSL(StartLevelManager.java:457)
at org.eclipse.osgi.framework.internal.core.StartLevelManager.doSetStartLevel(StartLevelManager.java:243)
at org.eclipse.osgi.framework.internal.core.StartLevelManager.dispatchEvent(StartLevelManager.java:438)
at org.eclipse.osgi.framework.internal.core.StartLevelManager.dispatchEvent(StartLevelManager.java:1)
at org.eclipse.osgi.framework.eventmgr.EventManager.dispatchEvent(EventManager.java:230)
at org.eclipse.osgi.framework.eventmgr.EventManager$EventThread.run(EventManager.java:340)
Caused by: java.lang.RuntimeException: page[325] data leaf table:0 entries:16 parent:144 keys:[506, 508, 510, 512, 514, 516, 522, 528, 530, 532, 534, 545, 547, 549, 551, 553] offsets:[1936, 1825, 1713, 1601, 1488, 1374, 1262, 1150, 1038, 926, 815, 702, 589, 478, 366, 253] parent 144 expected 28
at org.h2.message.DbException.throwInternalError(DbException.java:226)
at org.h2.index.PageDataIndex.getPage(PageDataIndex.java:231)
at org.h2.index.PageDataNode.getLastKey(PageDataNode.java:221)
at org.h2.index.PageDataIndex.<init>(PageDataIndex.java:74)
at org.h2.table.RegularTable.<init>(RegularTable.java:75)
at org.h2.store.PageStore.addMeta(PageStore.java:1443)
at org.h2.store.PageStore.readMetaData(PageStore.java:1380)
at org.h2.store.PageStore.recover(PageStore.java:1178)
at org.h2.store.PageStore.openExisting(PageStore.java:317)
at org.h2.store.PageStore.open(PageStore.java:271)
at org.h2.engine.Database.getPageStore(Database.java:2059)
at org.h2.engine.Database.open(Database.java:534)
at org.h2.engine.Database.openDatabase(Database.java:207)
... 39 more
[2013-10-28 10:43:30,012] ERROR - DefaultRealmService Cannot initialize the realm.
org.wso2.carbon.user.core.UserStoreException: Database Error - General error: "java.lang.RuntimeException: page[325] data leaf table:0 entries:16 parent:144 keys:[506, 508, 510, 512, 514, 516, 522, 528, 530, 532, 534, 545, 547, 549, 551, 553] offsets:[1936, 1825, 1713, 1601, 1488, 1374, 1262, 1150, 1038, 926, 815, 702, 589, 478, 366, 253] parent 144 expected 28" [50000-140]
at org.wso2.carbon.user.core.claim.dao.ClaimDAO.getDialectCount(ClaimDAO.java:169)
at org.wso2.carbon.user.core.common.DefaultRealm.populateProfileAndClaimMaps(DefaultRealm.java:309)
at org.wso2.carbon.user.core.common.DefaultRealm.init(DefaultRealm.java:109)
at org.wso2.carbon.user.core.common.DefaultRealmService.initializeRealm(DefaultRealmService.java:218)
at org.wso2.carbon.user.core.common.DefaultRealmService.<init>(DefaultRealmService.java:103)
at org.wso2.carbon.user.core.common.DefaultRealmService.<init>(DefaultRealmService.java:116)
at org.wso2.carbon.user.core.internal.Activator.startDeploy(Activator.java:67)
at org.wso2.carbon.user.core.internal.BundleCheckActivator.start(BundleCheckActivator.java:61)
at org.eclipse.osgi.framework.internal.core.BundleContextImpl$1.run(BundleContextImpl.java:711)
at java.security.AccessController.doPrivileged(Native Method)
at org.eclipse.osgi.framework.internal.core.BundleContextImpl.startActivator(BundleContextImpl.java:702)
at org.eclipse.osgi.framework.internal.core.BundleContextImpl.start(BundleContextImpl.java:683)
at org.eclipse.osgi.framework.internal.core.BundleHost.startWorker(BundleHost.java:381)
at org.eclipse.osgi.framework.internal.core.AbstractBundle.resume(AbstractBundle.java:389)
at org.eclipse.osgi.framework.internal.core.Framework.resumeBundle(Framework.java:1130)
at org.eclipse.osgi.framework.internal.core.StartLevelManager.resumeBundles(StartLevelManager.java:559)
at org.eclipse.osgi.framework.internal.core.StartLevelManager.resumeBundles(StartLevelManager.java:544)
at org.eclipse.osgi.framework.internal.core.StartLevelManager.incFWSL(StartLevelManager.java:457)
at org.eclipse.osgi.framework.internal.core.StartLevelManager.doSetStartLevel(StartLevelManager.java:243)
at org.eclipse.osgi.framework.internal.core.StartLevelManager.dispatchEvent(StartLevelManager.java:438)
at org.eclipse.osgi.framework.internal.core.StartLevelManager.dispatchEvent(StartLevelManager.java:1)
at org.eclipse.osgi.framework.eventmgr.EventManager.dispatchEvent(EventManager.java:230)
at org.eclipse.osgi.framework.eventmgr.EventManager$EventThread.run(EventManager.java:340)
Caused by: org.h2.jdbc.JdbcSQLException: General error: "java.lang.RuntimeException: page[325] data leaf table:0 entries:16 parent:144 keys:[506, 508, 510, 512, 514, 516, 522, 528, 530, 532, 534, 545, 547, 549, 551, 553] offsets:[1936, 1825, 1713, 1601, 1488, 1374, 1262, 1150, 1038, 926, 815, 702, 589, 478, 366, 253] parent 144 expected 28" [50000-140]
at org.h2.message.DbException.getJdbcSQLException(DbException.java:327)
at org.h2.message.DbException.get(DbException.java:156)
at org.h2.message.DbException.convert(DbException.java:279)
at org.h2.engine.Database.openDatabase(Database.java:237)
at org.h2.engine.Database.<init>(Database.java:202)
at org.h2.engine.Engine.openSession(Engine.java:56)
at org.h2.engine.Engine.openSession(Engine.java:146)
at org.h2.engine.Engine.getSession(Engine.java:125)
at org.h2.engine.Session.createSession(Session.java:122)
at org.h2.engine.SessionRemote.connectEmbeddedOrServer(SessionRemote.java:241)
at org.h2.engine.SessionRemote.createSession(SessionRemote.java:219)
at org.h2.jdbc.JdbcConnection.<init>(JdbcConnection.java:111)
at org.h2.jdbc.JdbcConnection.<init>(JdbcConnection.java:95)
at org.h2.Driver.connect(Driver.java:73)
at org.apache.tomcat.jdbc.pool.PooledConnection.connectUsingDriver(PooledConnection.java:277)
at org.apache.tomcat.jdbc.pool.PooledConnection.connect(PooledConnection.java:182)
at org.apache.tomcat.jdbc.pool.ConnectionPool.createConnection(ConnectionPool.java:694)
at org.apache.tomcat.jdbc.pool.ConnectionPool.borrowConnection(ConnectionPool.java:626)
at org.apache.tomcat.jdbc.pool.ConnectionPool.getConnection(ConnectionPool.java:182)
at org.apache.tomcat.jdbc.pool.DataSourceProxy.getConnection(DataSourceProxy.java:127)
at org.wso2.carbon.user.core.claim.dao.ClaimDAO.getDialectCount(ClaimDAO.java:159)
... 22 more
Caused by: java.lang.RuntimeException: page[325] data leaf table:0 entries:16 parent:144 keys:[506, 508, 510, 512, 514, 516, 522, 528, 530, 532, 534, 545, 547, 549, 551, 553] offsets:[1936, 1825, 1713, 1601, 1488, 1374, 1262, 1150, 1038, 926, 815, 702, 589, 478, 366, 253] parent 144 expected 28
at org.h2.message.DbException.throwInternalError(DbException.java:226)
at org.h2.index.PageDataIndex.getPage(PageDataIndex.java:231)
at org.h2.index.PageDataNode.getLastKey(PageDataNode.java:221)
at org.h2.index.PageDataIndex.<init>(PageDataIndex.java:74)
at org.h2.table.RegularTable.<init>(RegularTable.java:75)
at org.h2.store.PageStore.addMeta(PageStore.java:1443)
at org.h2.store.PageStore.readMetaData(PageStore.java:1380)
at org.h2.store.PageStore.recover(PageStore.java:1178)
at org.h2.store.PageStore.openExisting(PageStore.java:317)
at org.h2.store.PageStore.open(PageStore.java:271)
at org.h2.engine.Database.getPageStore(Database.java:2059)
at org.h2.engine.Database.open(Database.java:534)
at org.h2.engine.Database.openDatabase(Database.java:207)
... 39 more
[2013-10-28 10:43:30,021] ERROR - Activator Cannot start User Manager Core bundle
org.wso2.carbon.user.core.UserStoreException: Cannot initialize the realm.
at org.wso2.carbon.user.core.common.DefaultRealmService.initializeRealm(DefaultRealmService.java:222)
at org.wso2.carbon.user.core.common.DefaultRealmService.<init>(DefaultRealmService.java:103)
at org.wso2.carbon.user.core.common.DefaultRealmService.<init>(DefaultRealmService.java:116)
at org.wso2.carbon.user.core.internal.Activator.startDeploy(Activator.java:67)
at org.wso2.carbon.user.core.internal.BundleCheckActivator.start(BundleCheckActivator.java:61)
at org.eclipse.osgi.framework.internal.core.BundleContextImpl$1.run(BundleContextImpl.java:711)
at java.security.AccessController.doPrivileged(Native Method)
at org.eclipse.osgi.framework.internal.core.BundleContextImpl.startActivator(BundleContextImpl.java:702)
at org.eclipse.osgi.framework.internal.core.BundleContextImpl.start(BundleContextImpl.java:683)
at org.eclipse.osgi.framework.internal.core.BundleHost.startWorker(BundleHost.java:381)
at org.eclipse.osgi.framework.internal.core.AbstractBundle.resume(AbstractBundle.java:389)
at org.eclipse.osgi.framework.internal.core.Framework.resumeBundle(Framework.java:1130)
at org.eclipse.osgi.framework.internal.core.StartLevelManager.resumeBundles(StartLevelManager.java:559)
at org.eclipse.osgi.framework.internal.core.StartLevelManager.resumeBundles(StartLevelManager.java:544)
at org.eclipse.osgi.framework.internal.core.StartLevelManager.incFWSL(StartLevelManager.java:457)
at org.eclipse.osgi.framework.internal.core.StartLevelManager.doSetStartLevel(StartLevelManager.java:243)
at org.eclipse.osgi.framework.internal.core.StartLevelManager.dispatchEvent(StartLevelManager.java:438)
at org.eclipse.osgi.framework.internal.core.StartLevelManager.dispatchEvent(StartLevelManager.java:1)
at org.eclipse.osgi.framework.eventmgr.EventManager.dispatchEvent(EventManager.java:230)
at org.eclipse.osgi.framework.eventmgr.EventManager$EventThread.run(EventManager.java:340)
Caused by: org.wso2.carbon.user.core.UserStoreException: Database Error - General error: "java.lang.RuntimeException: page[325] data leaf table:0 entries:16 parent:144 keys:[506, 508, 510, 512, 514, 516, 522, 528, 530, 532, 534, 545, 547, 549, 551, 553] offsets:[1936, 1825, 1713, 1601, 1488, 1374, 1262, 1150, 1038, 926, 815, 702, 589, 478, 366, 253] parent 144 expected 28" [50000-140]
at org.wso2.carbon.user.core.claim.dao.ClaimDAO.getDialectCount(ClaimDAO.java:169)
at org.wso2.carbon.user.core.common.DefaultRealm.populateProfileAndClaimMaps(DefaultRealm.java:309)
at org.wso2.carbon.user.core.common.DefaultRealm.init(DefaultRealm.java:109)
at org.wso2.carbon.user.core.common.DefaultRealmService.initializeRealm(DefaultRealmService.java:218)
... 19 more
Caused by: org.h2.jdbc.JdbcSQLException: General error: "java.lang.RuntimeException: page[325] data leaf table:0 entries:16 parent:144 keys:[506, 508, 510, 512, 514, 516, 522, 528, 530, 532, 534, 545, 547, 549, 551, 553] offsets:[1936, 1825, 1713, 1601, 1488, 1374, 1262, 1150, 1038, 926, 815, 702, 589, 478, 366, 253] parent 144 expected 28" [50000-140]
at org.h2.message.DbException.getJdbcSQLException(DbException.java:327)
at org.h2.message.DbException.get(DbException.java:156)
at org.h2.message.DbException.convert(DbException.java:279)
at org.h2.engine.Database.openDatabase(Database.java:237)
at org.h2.engine.Database.<init>(Database.java:202)
at org.h2.engine.Engine.openSession(Engine.java:56)
at org.h2.engine.Engine.openSession(Engine.java:146)
at org.h2.engine.Engine.getSession(Engine.java:125)
at org.h2.engine.Session.createSession(Session.java:122)
at org.h2.engine.SessionRemote.connectEmbeddedOrServer(SessionRemote.java:241)
at org.h2.engine.SessionRemote.createSession(SessionRemote.java:219)
at org.h2.jdbc.JdbcConnection.<init>(JdbcConnection.java:111)
at org.h2.jdbc.JdbcConnection.<init>(JdbcConnection.java:95)
at org.h2.Driver.connect(Driver.java:73)
at org.apache.tomcat.jdbc.pool.PooledConnection.connectUsingDriver(PooledConnection.java:277)
at org.apache.tomcat.jdbc.pool.PooledConnection.connect(PooledConnection.java:182)
at org.apache.tomcat.jdbc.pool.ConnectionPool.createConnection(ConnectionPool.java:694)
at org.apache.tomcat.jdbc.pool.ConnectionPool.borrowConnection(ConnectionPool.java:626)
at org.apache.tomcat.jdbc.pool.ConnectionPool.getConnection(ConnectionPool.java:182)
at org.apache.tomcat.jdbc.pool.DataSourceProxy.getConnection(DataSourceProxy.java:127)
at org.wso2.carbon.user.core.claim.dao.ClaimDAO.getDialectCount(ClaimDAO.java:159)
... 22 more
Caused by: java.lang.RuntimeException: page[325] data leaf table:0 entries:16 parent:144 keys:[506, 508, 510, 512, 514, 516, 522, 528, 530, 532, 534, 545, 547, 549, 551, 553] offsets:[1936, 1825, 1713, 1601, 1488, 1374, 1262, 1150, 1038, 926, 815, 702, 589, 478, 366, 253] parent 144 expected 28
at org.h2.message.DbException.throwInternalError(DbException.java:226)
at org.h2.index.PageDataIndex.getPage(PageDataIndex.java:231)
at org.h2.index.PageDataNode.getLastKey(PageDataNode.java:221)
at org.h2.index.PageDataIndex.<init>(PageDataIndex.java:74)
at org.h2.table.RegularTable.<init>(RegularTable.java:75)
at org.h2.store.PageStore.addMeta(PageStore.java:1443)
at org.h2.store.PageStore.readMetaData(PageStore.java:1380)
at org.h2.store.PageStore.recover(PageStore.java:1178)
at org.h2.store.PageStore.openExisting(PageStore.java:317)
at org.h2.store.PageStore.open(PageStore.java:271)
at org.h2.engine.Database.getPageStore(Database.java:2059)
at org.h2.engine.Database.open(Database.java:534)
at org.h2.engine.Database.openDatabase(Database.java:207)
... 39 more
How can I avoid this? Is this an issue with my machine?

If you are getting errors like "Port already in use" when running more than one WSO2 product, it means you are trying to start both products on the same ports. Make sure you have set a unique port offset value for each product in carbon.xml (CARBON_HOME/repository/conf/carbon.xml) and then start the servers again.
E.g.
<Offset>0</Offset>
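For example (the value 1 here is only an illustration), you could leave the ESB at the default offset above and set
<Offset>1</Offset>
in the DSS's carbon.xml, which shifts all of the DSS's ports up by one (e.g. 9443 becomes 9444) so the two servers no longer clash.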

Related

Catch errors when connecting to database with Django loggers

I want to log errors related to failed connections to the database. For example, if I provide an invalid URL I get the error:
Traceback (most recent call last):
File "/home/vagrant/venv/lib/python3.6/site-packages/django/db/backends/base/base.py", line 219, in ensure_connection
self.connect()
File "/home/vagrant/venv/lib/python3.6/site-packages/django/utils/asyncio.py", line 26, in inner
return func(*args, **kwargs)
File "/home/vagrant/venv/lib/python3.6/site-packages/django/db/backends/base/base.py", line 200, in connect
self.connection = self.get_new_connection(conn_params)
File "/home/vagrant/venv/lib/python3.6/site-packages/django/utils/asyncio.py", line 26, in inner
return func(*args, **kwargs)
File "/home/vagrant/venv/lib/python3.6/site-packages/django/db/backends/postgresql/base.py", line 187, in get_new_connection
connection = Database.connect(**conn_params)
File "/home/vagrant/venv/lib/python3.6/site-packages/psycopg2/__init__.py", line 122, in connect
conn = _connect(dsn, connection_factory=connection_factory, **kwasync)
psycopg2.OperationalError: could not translate host name "wrong-url.com" to address: Name or service not known
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/usr/lib/python3.6/threading.py", line 916, in _bootstrap_inner
self.run()
File "/usr/lib/python3.6/threading.py", line 864, in run
self._target(*self._args, **self._kwargs)
File "/home/vagrant/venv/lib/python3.6/site-packages/django/utils/autoreload.py", line 64, in wrapper
fn(*args, **kwargs)
File "/home/vagrant/venv/lib/python3.6/site-packages/django/core/management/commands/runserver.py", line 121, in inner_run
self.check_migrations()
File "/home/vagrant/venv/lib/python3.6/site-packages/django/core/management/base.py", line 486, in check_migrations
executor = MigrationExecutor(connections[DEFAULT_DB_ALIAS])
File "/home/vagrant/venv/lib/python3.6/site-packages/django/db/migrations/executor.py", line 18, in __init__
self.loader = MigrationLoader(self.connection)
File "/home/vagrant/venv/lib/python3.6/site-packages/django/db/migrations/loader.py", line 53, in __init__
self.build_graph()
File "/home/vagrant/venv/lib/python3.6/site-packages/django/db/migrations/loader.py", line 220, in build_graph
self.applied_migrations = recorder.applied_migrations()
File "/home/vagrant/venv/lib/python3.6/site-packages/django/db/migrations/recorder.py", line 77, in applied_migrations
if self.has_table():
File "/home/vagrant/venv/lib/python3.6/site-packages/django/db/migrations/recorder.py", line 55, in has_table
with self.connection.cursor() as cursor:
File "/home/vagrant/venv/lib/python3.6/site-packages/django/utils/asyncio.py", line 26, in inner
return func(*args, **kwargs)
File "/home/vagrant/venv/lib/python3.6/site-packages/django/db/backends/base/base.py", line 259, in cursor
return self._cursor()
File "/home/vagrant/venv/lib/python3.6/site-packages/django/db/backends/base/base.py", line 235, in _cursor
self.ensure_connection()
File "/home/vagrant/venv/lib/python3.6/site-packages/django/utils/asyncio.py", line 26, in inner
return func(*args, **kwargs)
File "/home/vagrant/venv/lib/python3.6/site-packages/django/db/backends/base/base.py", line 219, in ensure_connection
self.connect()
File "/home/vagrant/venv/lib/python3.6/site-packages/django/db/utils.py", line 90, in __exit__
raise dj_exc_value.with_traceback(traceback) from exc_value
File "/home/vagrant/venv/lib/python3.6/site-packages/django/db/backends/base/base.py", line 219, in ensure_connection
self.connect()
File "/home/vagrant/venv/lib/python3.6/site-packages/django/utils/asyncio.py", line 26, in inner
return func(*args, **kwargs)
File "/home/vagrant/venv/lib/python3.6/site-packages/django/db/backends/base/base.py", line 200, in connect
self.connection = self.get_new_connection(conn_params)
File "/home/vagrant/venv/lib/python3.6/site-packages/django/utils/asyncio.py", line 26, in inner
return func(*args, **kwargs)
File "/home/vagrant/venv/lib/python3.6/site-packages/django/db/backends/postgresql/base.py", line 187, in get_new_connection
connection = Database.connect(**conn_params)
File "/home/vagrant/venv/lib/python3.6/site-packages/psycopg2/__init__.py", line 122, in connect
conn = _connect(dsn, connection_factory=connection_factory, **kwasync)
django.db.utils.OperationalError: could not translate host name "wrong-url.com" to address: Name or service not known
For loggers I defined the following
"handlers": {
"console": {"level": "DEBUG", "class": "logging.StreamHandler", "formatter": "console"},
},
"loggers": {
"django": {"handlers": ["console"], "level": "INFO", "propagate": True},
"django.db": {"handlers": ["console"], "level": "INFO", "propagate": True},
"django.db.backends": {"handlers": ["console"], "level": "INFO", "propagate": True},
},
But sadly this doesn't capture exceptions when connecting to the database.
Sidenote: If I set level to DEBUG on the loggers, it does log all SQL queries so it's not a problem with the handler I believe.
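For reference, this fragment sits inside Django's LOGGING setting; spelled out in full it would look roughly like the following (the console formatter definition is an assumption, since it is not shown above):

LOGGING = {
    "version": 1,
    "disable_existing_loggers": False,
    # The "console" formatter below is assumed; the question does not show it.
    "formatters": {
        "console": {"format": "%(asctime)s %(levelname)s %(name)s %(message)s"},
    },
    "handlers": {
        "console": {"level": "DEBUG", "class": "logging.StreamHandler", "formatter": "console"},
    },
    "loggers": {
        "django": {"handlers": ["console"], "level": "INFO", "propagate": True},
        "django.db": {"handlers": ["console"], "level": "INFO", "propagate": True},
        "django.db.backends": {"handlers": ["console"], "level": "INFO", "propagate": True},
    },
}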

DataFlow streaming process stuck

Running Apache Beam on Dataflow, I was able to run a streaming pipeline successfully for 12 days (November 5-17); then the Dataflow job stopped processing data.
I see SSL errors when contacting AI Platform prediction and DataFlow shows:
Processing stuck in step <step_id> for at least <time_interval> without outputting or completing in state finish at <stack_trace>
Is it enough to handle the SSL exception? What is the best way to prevent this deadlock in Dataflow?
Related article here
Version
Streaming Job
Python 2.7 Apache Beam GCP: 2.16.0
Flow
Tweet Python listener (Reads Tweets using tweepy) -> PubSub -> DataFlow (Reads data from PubSub, calls AI Platform prediction) -> BigQuery
Listener code
DataFlow code
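The listener and pipeline code are not included here; a minimal sketch of the described flow, with placeholder topic/table names and a placeholder prediction DoFn, might look like:

import apache_beam as beam
from apache_beam.options.pipeline_options import PipelineOptions, StandardOptions


class PredictDoFn(beam.DoFn):
    # Placeholder: the real DoFn would call AI Platform prediction here.
    def process(self, element):
        yield {'tweet': element.decode('utf-8')}


options = PipelineOptions()
options.view_as(StandardOptions).streaming = True

with beam.Pipeline(options=options) as p:
    (p
     | 'ReadTweets' >> beam.io.ReadFromPubSub(topic='projects/YOUR_PROJECT/topics/tweets')
     | 'Predict' >> beam.ParDo(PredictDoFn())
     | 'WriteToBigQuery' >> beam.io.WriteToBigQuery(
         'YOUR_PROJECT:dataset.table', schema='tweet:STRING'))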
I tried to access my data today and noticed that the pipeline stopped processing back on November 17th. I see this error:
2019-12-06 21:15:26.960 PST Error message from worker: Processing stuck in step s02 for at least 476h25m00s without outputting or completing in state finish
at sun.misc.Unsafe.park(Native Method)
at java.util.concurrent.locks.LockSupport.park(LockSupport.java:175)
at java.util.concurrent.CompletableFuture$Signaller.block(CompletableFuture.java:1693)
at java.util.concurrent.ForkJoinPool.managedBlock(ForkJoinPool.java:3323)
at java.util.concurrent.CompletableFuture.waitingGet(CompletableFuture.java:1729)
at java.util.concurrent.CompletableFuture.get(CompletableFuture.java:1895)
at org.apache.beam.sdk.util.MoreFutures.get(MoreFutures.java:57)
at org.apache.beam.runners.dataflow.worker.fn.control.RegisterAndProcessBundleOperation.finish(RegisterAndProcessBundleOperation.java:330)
at org.apache.beam.runners.dataflow.worker.util.common.worker.MapTaskExecutor.execute(MapTaskExecutor.java:85)
at org.apache.beam.runners.dataflow.worker.fn.control.BeamFnMapTaskExecutor.execute(BeamFnMapTaskExecutor.java:125)
at org.apache.beam.runners.dataflow.worker.StreamingDataflowWorker.process(StreamingDataflowWorker.java:1320)
at org.apache.beam.runners.dataflow.worker.StreamingDataflowWorker.access$1000(StreamingDataflowWorker.java:151)
at org.apache.beam.runners.dataflow.worker.StreamingDataflowWorker$6.run(StreamingDataflowWorker.java:1053)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
at java.lang.Thread.run(Thread.java:745)
I also see this error:
java.util.concurrent.ExecutionException: java.lang.RuntimeException: Error received from SDK harness for instruction -2222113: Traceback (most recent call last):
File "/usr/local/lib/python2.7/dist-packages/apache_beam/runners/worker/sdk_worker.py", line 158, in _execute
response = task()
File "/usr/local/lib/python2.7/dist-packages/apache_beam/runners/worker/sdk_worker.py", line 191, in <lambda>
self._execute(lambda: worker.do_instruction(work), work)
File "/usr/local/lib/python2.7/dist-packages/apache_beam/runners/worker/sdk_worker.py", line 343, in do_instruction
request.instruction_id)
File "/usr/local/lib/python2.7/dist-packages/apache_beam/runners/worker/sdk_worker.py", line 369, in process_bundle
bundle_processor.process_bundle(instruction_id))
File "/usr/local/lib/python2.7/dist-packages/apache_beam/runners/worker/bundle_processor.py", line 663, in process_bundle
data.ptransform_id].process_encoded(data.data)
File "/usr/local/lib/python2.7/dist-packages/apache_beam/runners/worker/bundle_processor.py", line 143, in process_encoded
self.output(decoded_value)
File "apache_beam/runners/worker/operations.py", line 255, in apache_beam.runners.worker.operations.Operation.output
def output(self, windowed_value, output_index=0):
File "apache_beam/runners/worker/operations.py", line 256, in apache_beam.runners.worker.operations.Operation.output
cython.cast(Receiver, self.receivers[output_index]).receive(windowed_value)
File "apache_beam/runners/worker/operations.py", line 143, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
self.consumer.process(windowed_value)
File "apache_beam/runners/worker/operations.py", line 593, in apache_beam.runners.worker.operations.DoOperation.process
with self.scoped_process_state:
File "apache_beam/runners/worker/operations.py", line 594, in apache_beam.runners.worker.operations.DoOperation.process
delayed_application = self.dofn_receiver.receive(o)
File "apache_beam/runners/common.py", line 776, in apache_beam.runners.common.DoFnRunner.receive
self.process(windowed_value)
File "apache_beam/runners/common.py", line 782, in apache_beam.runners.common.DoFnRunner.process
self._reraise_augmented(exn)
File "apache_beam/runners/common.py", line 834, in apache_beam.runners.common.DoFnRunner._reraise_augmented
raise
File "apache_beam/runners/common.py", line 780, in apache_beam.runners.common.DoFnRunner.process
return self.do_fn_invoker.invoke_process(windowed_value)
File "apache_beam/runners/common.py", line 587, in apache_beam.runners.common.PerWindowInvoker.invoke_process
self._invoke_process_per_window(
File "apache_beam/runners/common.py", line 659, in apache_beam.runners.common.PerWindowInvoker._invoke_process_per_window
output_processor.process_outputs(
File "apache_beam/runners/common.py", line 880, in apache_beam.runners.common._OutputProcessor.process_outputs
def process_outputs(self, windowed_input_element, results):
File "apache_beam/runners/common.py", line 919, in apache_beam.runners.common._OutputProcessor.process_outputs
self.main_receivers.receive(windowed_value)
File "apache_beam/runners/worker/operations.py", line 143, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
self.consumer.process(windowed_value)
File "apache_beam/runners/worker/operations.py", line 593, in apache_beam.runners.worker.operations.DoOperation.process
with self.scoped_process_state:
File "apache_beam/runners/worker/operations.py", line 594, in apache_beam.runners.worker.operations.DoOperation.process
delayed_application = self.dofn_receiver.receive(o)
File "apache_beam/runners/common.py", line 776, in apache_beam.runners.common.DoFnRunner.receive
self.process(windowed_value)
File "apache_beam/runners/common.py", line 782, in apache_beam.runners.common.DoFnRunner.process
self._reraise_augmented(exn)
File "apache_beam/runners/common.py", line 834, in apache_beam.runners.common.DoFnRunner._reraise_augmented
raise
File "apache_beam/runners/common.py", line 780, in apache_beam.runners.common.DoFnRunner.process
return self.do_fn_invoker.invoke_process(windowed_value)
File "apache_beam/runners/common.py", line 587, in apache_beam.runners.common.PerWindowInvoker.invoke_process
self._invoke_process_per_window(
File "apache_beam/runners/common.py", line 659, in apache_beam.runners.common.PerWindowInvoker._invoke_process_per_window
output_processor.process_outputs(
File "apache_beam/runners/common.py", line 880, in apache_beam.runners.common._OutputProcessor.process_outputs
def process_outputs(self, windowed_input_element, results):
File "apache_beam/runners/common.py", line 919, in apache_beam.runners.common._OutputProcessor.process_outputs
self.main_receivers.receive(windowed_value)
File "apache_beam/runners/worker/operations.py", line 143, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
self.consumer.process(windowed_value)
File "apache_beam/runners/worker/operations.py", line 593, in apache_beam.runners.worker.operations.DoOperation.process
with self.scoped_process_state:
File "apache_beam/runners/worker/operations.py", line 594, in apache_beam.runners.worker.operations.DoOperation.process
delayed_application = self.dofn_receiver.receive(o)
File "apache_beam/runners/common.py", line 776, in apache_beam.runners.common.DoFnRunner.receive
self.process(windowed_value)
File "apache_beam/runners/common.py", line 782, in apache_beam.runners.common.DoFnRunner.process
self._reraise_augmented(exn)
File "apache_beam/runners/common.py", line 849, in apache_beam.runners.common.DoFnRunner._reraise_augmented
raise_with_traceback(new_exn)
File "apache_beam/runners/common.py", line 780, in apache_beam.runners.common.DoFnRunner.process
return self.do_fn_invoker.invoke_process(windowed_value)
File "apache_beam/runners/common.py", line 441, in apache_beam.runners.common.SimpleInvoker.invoke_process
windowed_value, self.process_method(windowed_value.value))
File "streaming_twitter.py", line 203, in <lambda>
File "streaming_twitter.py", line 112, in estimate
File "streaming_twitter.py", line 96, in prediction
File "/usr/local/lib/python2.7/dist-packages/googleapiclient/_helpers.py", line 130, in positional_wrapper
return wrapped(*args, **kwargs)
File "/usr/local/lib/python2.7/dist-packages/googleapiclient/http.py", line 851, in execute
method=str(self.method), body=self.body, headers=self.headers)
File "/usr/local/lib/python2.7/dist-packages/googleapiclient/http.py", line 165, in _retry_request
resp, content = http.request(uri, method, *args, **kwargs)
File "/usr/local/lib/python2.7/dist-packages/google_auth_httplib2.py", line 198, in request
uri, method, body=body, headers=request_headers, **kwargs)
File "/usr/local/lib/python2.7/dist-packages/httplib2/__init__.py", line 2133, in request
cachekey,
File "/usr/local/lib/python2.7/dist-packages/httplib2/__init__.py", line 1796, in _request
conn, request_uri, method, body, headers
File "/usr/local/lib/python2.7/dist-packages/httplib2/__init__.py", line 1737, in _conn_request
response = conn.getresponse()
File "/usr/lib/python2.7/httplib.py", line 1121, in getresponse
response.begin()
File "/usr/lib/python2.7/httplib.py", line 438, in begin
version, status, reason = self._read_status()
File "/usr/lib/python2.7/httplib.py", line 394, in _read_status
line = self.fp.readline(_MAXLINE + 1)
File "/usr/lib/python2.7/socket.py", line 480, in readline
data = self._sock.recv(self._rbufsize)
File "/usr/lib/python2.7/ssl.py", line 766, in recv
return self.read(buflen)
File "/usr/lib/python2.7/ssl.py", line 653, in read
v = self._sslobj.read(len)
RuntimeError: error: [Errno 0] Error [while running 'generatedPtransform-2222099']
java.util.concurrent.CompletableFuture.reportGet(CompletableFuture.java:357)
java.util.concurrent.CompletableFuture.get(CompletableFuture.java:1895)
org.apache.beam.sdk.util.MoreFutures.get(MoreFutures.java:57)
org.apache.beam.runners.dataflow.worker.fn.control.RegisterAndProcessBundleOperation.finish(RegisterAndProcessBundleOperation.java:330)
org.apache.beam.runners.dataflow.worker.util.common.worker.MapTaskExecutor.execute(MapTaskExecutor.java:85)
org.apache.beam.runners.dataflow.worker.fn.control.BeamFnMapTaskExecutor.execute(BeamFnMapTaskExecutor.java:125)
org.apache.beam.runners.dataflow.worker.StreamingDataflowWorker.process(StreamingDataflowWorker.java:1320)
org.apache.beam.runners.dataflow.worker.StreamingDataflowWorker.access$1000(StreamingDataflowWorker.java:151)
org.apache.beam.runners.dataflow.worker.StreamingDataflowWorker$6.run(StreamingDataflowWorker.java:1053)
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.RuntimeException: Error received from SDK harness for instruction -2222113: Traceback (most recent call last):
File "/usr/local/lib/python2.7/dist-packages/apache_beam/runners/worker/sdk_worker.py", line 158, in _execute
response = task()
File "/usr/local/lib/python2.7/dist-packages/apache_beam/runners/worker/sdk_worker.py", line 191, in <lambda>
self._execute(lambda: worker.do_instruction(work), work)
File "/usr/local/lib/python2.7/dist-packages/apache_beam/runners/worker/sdk_worker.py", line 343, in do_instruction
request.instruction_id)
File "/usr/local/lib/python2.7/dist-packages/apache_beam/runners/worker/sdk_worker.py", line 369, in process_bundle
bundle_processor.process_bundle(instruction_id))
File "/usr/local/lib/python2.7/dist-packages/apache_beam/runners/worker/bundle_processor.py", line 663, in process_bundle
data.ptransform_id].process_encoded(data.data)
File "/usr/local/lib/python2.7/dist-packages/apache_beam/runners/worker/bundle_processor.py", line 143, in process_encoded
self.output(decoded_value)
File "apache_beam/runners/worker/operations.py", line 255, in apache_beam.runners.worker.operations.Operation.output
def output(self, windowed_value, output_index=0):
File "apache_beam/runners/worker/operations.py", line 256, in apache_beam.runners.worker.operations.Operation.output
cython.cast(Receiver, self.receivers[output_index]).receive(windowed_value)
File "apache_beam/runners/worker/operations.py", line 143, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
self.consumer.process(windowed_value)
File "apache_beam/runners/worker/operations.py", line 593, in apache_beam.runners.worker.operations.DoOperation.process
with self.scoped_process_state:
File "apache_beam/runners/worker/operations.py", line 594, in apache_beam.runners.worker.operations.DoOperation.process
delayed_application = self.dofn_receiver.receive(o)
File "apache_beam/runners/common.py", line 776, in apache_beam.runners.common.DoFnRunner.receive
self.process(windowed_value)
File "apache_beam/runners/common.py", line 782, in apache_beam.runners.common.DoFnRunner.process
self._reraise_augmented(exn)
File "apache_beam/runners/common.py", line 834, in apache_beam.runners.common.DoFnRunner._reraise_augmented
raise
File "apache_beam/runners/common.py", line 780, in apache_beam.runners.common.DoFnRunner.process
return self.do_fn_invoker.invoke_process(windowed_value)
File "apache_beam/runners/common.py", line 587, in apache_beam.runners.common.PerWindowInvoker.invoke_process
self._invoke_process_per_window(
File "apache_beam/runners/common.py", line 659, in apache_beam.runners.common.PerWindowInvoker._invoke_process_per_window
output_processor.process_outputs(
File "apache_beam/runners/common.py", line 880, in apache_beam.runners.common._OutputProcessor.process_outputs
def process_outputs(self, windowed_input_element, results):
File "apache_beam/runners/common.py", line 919, in apache_beam.runners.common._OutputProcessor.process_outputs
self.main_receivers.receive(windowed_value)
File "apache_beam/runners/worker/operations.py", line 143, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
self.consumer.process(windowed_value)
File "apache_beam/runners/worker/operations.py", line 593, in apache_beam.runners.worker.operations.DoOperation.process
with self.scoped_process_state:
File "apache_beam/runners/worker/operations.py", line 594, in apache_beam.runners.worker.operations.DoOperation.process
delayed_application = self.dofn_receiver.receive(o)
File "apache_beam/runners/common.py", line 776, in apache_beam.runners.common.DoFnRunner.receive
self.process(windowed_value)
File "apache_beam/runners/common.py", line 782, in apache_beam.runners.common.DoFnRunner.process
self._reraise_augmented(exn)
File "apache_beam/runners/common.py", line 834, in apache_beam.runners.common.DoFnRunner._reraise_augmented
raise
File "apache_beam/runners/common.py", line 780, in apache_beam.runners.common.DoFnRunner.process
return self.do_fn_invoker.invoke_process(windowed_value)
File "apache_beam/runners/common.py", line 587, in apache_beam.runners.common.PerWindowInvoker.invoke_process
self._invoke_process_per_window(
File "apache_beam/runners/common.py", line 659, in apache_beam.runners.common.PerWindowInvoker._invoke_process_per_window
output_processor.process_outputs(
File "apache_beam/runners/common.py", line 880, in apache_beam.runners.common._OutputProcessor.process_outputs
def process_outputs(self, windowed_input_element, results):
File "apache_beam/runners/common.py", line 919, in apache_beam.runners.common._OutputProcessor.process_outputs
self.main_receivers.receive(windowed_value)
File "apache_beam/runners/worker/operations.py", line 143, in apache_beam.runners.worker.operations.SingletonConsumerSet.receive
self.consumer.process(windowed_value)
File "apache_beam/runners/worker/operations.py", line 593, in apache_beam.runners.worker.operations.DoOperation.process
with self.scoped_process_state:
File "apache_beam/runners/worker/operations.py", line 594, in apache_beam.runners.worker.operations.DoOperation.process
delayed_application = self.dofn_receiver.receive(o)
File "apache_beam/runners/common.py", line 776, in apache_beam.runners.common.DoFnRunner.receive
self.process(windowed_value)
File "apache_beam/runners/common.py", line 782, in apache_beam.runners.common.DoFnRunner.process
self._reraise_augmented(exn)
File "apache_beam/runners/common.py", line 849, in apache_beam.runners.common.DoFnRunner._reraise_augmented
raise_with_traceback(new_exn)
File "apache_beam/runners/common.py", line 780, in apache_beam.runners.common.DoFnRunner.process
return self.do_fn_invoker.invoke_process(windowed_value)
File "apache_beam/runners/common.py", line 441, in apache_beam.runners.common.SimpleInvoker.invoke_process
windowed_value, self.process_method(windowed_value.value))
File "streaming_twitter.py", line 203, in <lambda>
File "streaming_twitter.py", line 112, in estimate
File "streaming_twitter.py", line 96, in prediction
File "/usr/local/lib/python2.7/dist-packages/googleapiclient/_helpers.py", line 130, in positional_wrapper
return wrapped(*args, **kwargs)
File "/usr/local/lib/python2.7/dist-packages/googleapiclient/http.py", line 851, in execute
method=str(self.method), body=self.body, headers=self.headers)
File "/usr/local/lib/python2.7/dist-packages/googleapiclient/http.py", line 165, in _retry_request
resp, content = http.request(uri, method, *args, **kwargs)
File "/usr/local/lib/python2.7/dist-packages/google_auth_httplib2.py", line 198, in request
uri, method, body=body, headers=request_headers, **kwargs)
File "/usr/local/lib/python2.7/dist-packages/httplib2/__init__.py", line 2133, in request
cachekey,
File "/usr/local/lib/python2.7/dist-packages/httplib2/__init__.py", line 1796, in _request
conn, request_uri, method, body, headers
File "/usr/local/lib/python2.7/dist-packages/httplib2/__init__.py", line 1737, in _conn_request
response = conn.getresponse()
File "/usr/lib/python2.7/httplib.py", line 1121, in getresponse
response.begin()
File "/usr/lib/python2.7/httplib.py", line 438, in begin
version, status, reason = self._read_status()
File "/usr/lib/python2.7/httplib.py", line 394, in _read_status
line = self.fp.readline(_MAXLINE + 1)
File "/usr/lib/python2.7/socket.py", line 480, in readline
data = self._sock.recv(self._rbufsize)
File "/usr/lib/python2.7/ssl.py", line 766, in recv
return self.read(buflen)
File "/usr/lib/python2.7/ssl.py", line 653, in read
v = self._sslobj.read(len)
RuntimeError: error: [Errno 0] Error [while running 'generatedPtransform-2222099']
org.apache.beam.runners.fnexecution.control.FnApiControlClient$ResponseStreamObserver.onNext(FnApiControlClient.java:157)
org.apache.beam.runners.fnexecution.control.FnApiControlClient$ResponseStreamObserver.onNext(FnApiControlClient.java:140)
org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.ServerCalls$StreamingServerCallHandler$StreamingServerCallListener.onMessage(ServerCalls.java:249)
org.apache.beam.vendor.grpc.v1p21p0.io.grpc.ForwardingServerCallListener.onMessage(ForwardingServerCallListener.java:33)
org.apache.beam.vendor.grpc.v1p21p0.io.grpc.Contexts$ContextualizedServerCallListener.onMessage(Contexts.java:76)
org.apache.beam.vendor.grpc.v1p21p0.io.grpc.internal.ServerCallImpl$ServerStreamListenerImpl.messagesAvailable(ServerCallImpl.java:297)
org.apache.beam.vendor.grpc.v1p21p0.io.grpc.internal.ServerImpl$JumpToApplicationThreadServerStreamListener$1MessagesAvailable.runInContext(ServerImpl.java:738)
org.apache.beam.vendor.grpc.v1p21p0.io.grpc.internal.ContextRunnable.run(ContextRunnable.java:37)
org.apache.beam.vendor.grpc.v1p21p0.io.grpc.internal.SerializingExecutor.run(SerializingExecutor.java:123)
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
java.lang.Thread.run(Thread.java:745)
Processing stuck in step s02 for at least 05h20m00s without outputting or completing in state process
at sun.misc.Unsafe.park(Native Method)
at java.util.concurrent.locks.LockSupport.park(LockSupport.java:175)
at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2039)
at org.apache.beam.runners.dataflow.worker.fn.data.RemoteGrpcPortWriteOperation.maybeWait(RemoteGrpcPortWriteOperation.java:175)
at org.apache.beam.runners.dataflow.worker.fn.data.RemoteGrpcPortWriteOperation.process(RemoteGrpcPortWriteOperation.java:196)
at org.apache.beam.runners.dataflow.worker.util.common.worker.OutputReceiver.process(OutputReceiver.java:49)
at org.apache.beam.runners.dataflow.worker.util.common.worker.ReadOperation.runReadLoop(ReadOperation.java:201)
at org.apache.beam.runners.dataflow.worker.util.common.worker.ReadOperation.start(ReadOperation.java:159)
at org.apache.beam.runners.dataflow.worker.util.common.worker.MapTaskExecutor.execute(MapTaskExecutor.java:77)
at org.apache.beam.runners.dataflow.worker.fn.control.BeamFnMapTaskExecutor.execute(BeamFnMapTaskExecutor.java:125)
at org.apache.beam.runners.dataflow.worker.StreamingDataflowWorker.process(StreamingDataflowWorker.java:1320)
at org.apache.beam.runners.dataflow.worker.StreamingDataflowWorker.access$1000(StreamingDataflowWorker.java:151)
at org.apache.beam.runners.dataflow.worker.StreamingDataflowWorker$6.run(StreamingDataflowWorker.java:1053)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
at java.lang.Thread.run(Thread.java:745)
Are you running some extremely slow operation during bundle finalization (DoFn.finish_bundle or DoFn.teardown), for example a per-element RPC call? If so, please try to optimize it so that it runs in batches. As the page you referred to points out, the "Processing stuck" warning just means that a DoFn did not transition out of a given state for a long period. That could be due to something being very slow or actually being stuck, for example while performing some RPC. If it is actually stuck, please try introducing a timeout to that operation, as in the sketch below.
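A minimal sketch of that timeout suggestion, assuming the prediction call goes through googleapiclient as the traceback indicates; the class name, model path, timeout value, and retry count are placeholders rather than the asker's code:

import logging

import apache_beam as beam
import google.auth
import google_auth_httplib2
import httplib2
from googleapiclient import discovery


class PredictWithTimeout(beam.DoFn):
    """Calls AI Platform prediction with an explicit HTTP timeout."""

    def start_bundle(self):
        # A socket-level timeout makes a hung SSL read raise instead of
        # blocking the bundle (and the step) indefinitely.
        credentials, _ = google.auth.default()
        authed_http = google_auth_httplib2.AuthorizedHttp(
            credentials, http=httplib2.Http(timeout=60))
        self._service = discovery.build(
            'ml', 'v1', http=authed_http, cache_discovery=False)

    def process(self, element):
        name = 'projects/YOUR_PROJECT/models/YOUR_MODEL'  # placeholder
        try:
            response = self._service.projects().predict(
                name=name, body={'instances': [element]}).execute(num_retries=3)
            yield element, response
        except Exception:
            # Log and drop rather than leaving the step stuck on one element.
            logging.exception('Prediction failed for element %r', element)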

How to run nested for loops in a ParDo of Apache Beam on Google Dataflow and return a JSON dict

This code works when I run it on my local PC, but when I try to run it on Google Dataflow it gives me an error. I found the location of the error, which is in the ParDo function: the first loop returns the values as desired, but the second loop gives me an error. Can anybody suggest an alternative approach or figure out where the issue is?
element['content'] is a URL giving the location of the file with the JSON data.
class DimTrans(beam.DoFn):
    def process(self, element):
        import pandas as pd
        import apache_beam as beam
        data = pd.read_json(element['content'])
        title = data[:1]
        data = data[1:]
        for idx, item in data.iterrows():
            for dim, axis in item.items():
                label = (title[dim].values[0])
                d = {'axisDim': int(dim),
                     'axisOrder': int(idx),
                     'axisValue': float(axis),
                     'axisTitle': u'{}'.format(label)}
                yield d
Error trace:
An exception was raised when trying to execute the workitem 533970500833477506 : Traceback (most recent call last):
File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/batchworker.py", line 642, in do_work
work_executor.execute()
File "/usr/local/lib/python2.7/dist-packages/dataflow_worker/executor.py", line 156, in execute
op.start()
File "dataflow_worker/native_operations.py", line 38, in dataflow_worker.native_operations.NativeReadOperation.start
def start(self):
File "dataflow_worker/native_operations.py", line 39, in dataflow_worker.native_operations.NativeReadOperation.start
with self.scoped_start_state:
File "dataflow_worker/native_operations.py", line 44, in dataflow_worker.native_operations.NativeReadOperation.start
with self.spec.source.reader() as reader:
File "dataflow_worker/native_operations.py", line 54, in dataflow_worker.native_operations.NativeReadOperation.start
self.output(windowed_value)
File "apache_beam/runners/worker/operations.py", line 175, in apache_beam.runners.worker.operations.Operation.output
cython.cast(Receiver, self.receivers[output_index]).receive(windowed_value)
File "apache_beam/runners/worker/operations.py", line 85, in apache_beam.runners.worker.operations.ConsumerSet.receive
cython.cast(Operation, consumer).process(windowed_value)
File "apache_beam/runners/worker/operations.py", line 403, in apache_beam.runners.worker.operations.DoOperation.process
with self.scoped_process_state:
File "apache_beam/runners/worker/operations.py", line 404, in apache_beam.runners.worker.operations.DoOperation.process
self.dofn_receiver.receive(o)
File "apache_beam/runners/common.py", line 569, in apache_beam.runners.common.DoFnRunner.receive
self.process(windowed_value)
File "apache_beam/runners/common.py", line 577, in apache_beam.runners.common.DoFnRunner.process
self._reraise_augmented(exn)
File "apache_beam/runners/common.py", line 602, in apache_beam.runners.common.DoFnRunner._reraise_augmented
raise
File "apache_beam/runners/common.py", line 575, in apache_beam.runners.common.DoFnRunner.process
self.do_fn_invoker.invoke_process(windowed_value)
File "apache_beam/runners/common.py", line 352, in apache_beam.runners.common.SimpleInvoker.invoke_process
output_processor.process_outputs(
File "apache_beam/runners/common.py", line 673, in apache_beam.runners.common._OutputProcessor.process_outputs
self.main_receivers.receive(windowed_value)
File "apache_beam/runners/worker/operations.py", line 85, in apache_beam.runners.worker.operations.ConsumerSet.receive
cython.cast(Operation, consumer).process(windowed_value)
File "apache_beam/runners/worker/operations.py", line 403, in apache_beam.runners.worker.operations.DoOperation.process
with self.scoped_process_state:
File "apache_beam/runners/worker/operations.py", line 404, in apache_beam.runners.worker.operations.DoOperation.process
self.dofn_receiver.receive(o)
File "apache_beam/runners/common.py", line 569, in apache_beam.runners.common.DoFnRunner.receive
self.process(windowed_value)
File "apache_beam/runners/common.py", line 577, in apache_beam.runners.common.DoFnRunner.process
self._reraise_augmented(exn)
File "apache_beam/runners/common.py", line 618, in apache_beam.runners.common.DoFnRunner._reraise_augmented
six.reraise(type(new_exn), new_exn, original_traceback)
File "apache_beam/runners/common.py", line 575, in apache_beam.runners.common.DoFnRunner.process
self.do_fn_invoker.invoke_process(windowed_value)
File "apache_beam/runners/common.py", line 352, in apache_beam.runners.common.SimpleInvoker.invoke_process
output_processor.process_outputs(
File "apache_beam/runners/common.py", line 651, in apache_beam.runners.common._OutputProcessor.process_outputs
for result in results:
File "C:/Users/PycharmProjects/GCP/test1file.py", line 56, in process
File "/usr/local/lib/python2.7/dist-packages/pandas/core/generic.py", line 2668, in __getattr__
return object.__getattribute__(self, name)
AttributeError: 'Series' object has no attribute 'items' [while running 'Print Results/ParDo(DimTrans)']
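Judging by the AttributeError, one possibility is that the Dataflow workers run an older pandas than the local machine, where a Series exposes iteritems() but not items(). A sketch of the inner loop adjusted for that (pinning the pandas version through the pipeline's requirements file is the other obvious route):

for idx, item in data.iterrows():
    # iteritems() instead of items(), for older pandas on the worker
    for dim, axis in item.iteritems():
        label = title[dim].values[0]
        d = {'axisDim': int(dim),
             'axisOrder': int(idx),
             'axisValue': float(axis),
             'axisTitle': u'{}'.format(label)}
        yield d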

How to set up django-celery-beat with a multi-tenant (django-tenant-schemas) Django application

I am trying to run celery beat along with celery. Celery works fine with my multi-tenant Django application (using django-tenant-schemas) with the help of tenant_schemas_celery. But I am not able to run celery beat, because all the scheduled/periodic task related tables are tenant-specific, and hence celery beat fails to start.
Error trace:
celery@Amits-iMac.local v4.0.2 (latentcall)
Darwin-17.5.0-x86_64-i386-64bit 2018-05-04 18:20:22
[config]
.> app: __main__:0x102ef9f28
.> transport: amqp://guest:**@localhost:5672//
.> results:
.> concurrency: 4 (prefork)
.> task events: ON
[queues]
.> celery exchange=celery(direct) key=celery
[tasks]
. dataflow.tasks.launch_dataflow
. datasession.tasks.launch_copy
[2018-05-04 18:20:23,035: INFO/Beat] beat: Starting...
[2018-05-04 18:20:23,166: ERROR/Beat] Process Beat
Traceback (most recent call last):
File "/Users/amit/koolanch-dev/dev-1/lib/python3.6/site-packages/kombu/utils/objects.py", line 42, in __get__
return obj.__dict__[self.__name__]
KeyError: 'scheduler'
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/Users/amit/koolanch-dev/dev-1/lib/python3.6/site-packages/django/db/backends/utils.py", line 65, in execute
return self.cursor.execute(sql, params)
psycopg2.ProgrammingError: relation "django_celery_beat_periodictask" does not exist
LINE 1: ...ango_celery_beat_periodictask"."description" FROM "django_ce...
^
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/Users/amit/koolanch-dev/dev-1/lib/python3.6/site-packages/billiard/process.py", line 306, in _bootstrap
self.run()
File "/Users/amit/koolanch-dev/dev-1/lib/python3.6/site-packages/celery/beat.py", line 613, in run
self.service.start(embedded_process=True)
File "/Users/amit/koolanch-dev/dev-1/lib/python3.6/site-packages/celery/beat.py", line 528, in start
humanize_seconds(self.scheduler.max_interval))
File "/Users/amit/koolanch-dev/dev-1/lib/python3.6/site-packages/kombu/utils/objects.py", line 44, in __get__
value = obj.__dict__[self.__name__] = self.__get(obj)
File "/Users/amit/koolanch-dev/dev-1/lib/python3.6/site-packages/celery/beat.py", line 572, in scheduler
return self.get_scheduler()
File "/Users/amit/koolanch-dev/dev-1/lib/python3.6/site-packages/celery/beat.py", line 567, in get_scheduler
lazy=lazy,
File "/Users/amit/koolanch-dev/dev-1/lib/python3.6/site-packages/django_celery_beat/schedulers.py", line 181, in __init__
Scheduler.__init__(self, *args, **kwargs)
File "/Users/amit/koolanch-dev/dev-1/lib/python3.6/site-packages/celery/beat.py", line 204, in __init__
self.setup_schedule()
File "/Users/amit/koolanch-dev/dev-1/lib/python3.6/site-packages/django_celery_beat/schedulers.py", line 189, in setup_schedule
self.install_default_entries(self.schedule)
File "/Users/amit/thoughtanalytik/koolanch/src/koolanch/beat_schedulers.py", line 27, in schedule
self._schedule = self.all_as_schedule()
File "/Users/amit/koolanch-dev/dev-1/lib/python3.6/site-packages/django_celery_beat/schedulers.py", line 195, in all_as_schedule
for model in self.Model.objects.enabled():
File "/Users/amit/koolanch-dev/dev-1/lib/python3.6/site-packages/django/db/models/query.py", line 250, in __iter__
self._fetch_all()
File "/Users/amit/koolanch-dev/dev-1/lib/python3.6/site-packages/django/db/models/query.py", line 1118, in _fetch_all
self._result_cache = list(self._iterable_class(self))
File "/Users/amit/koolanch-dev/dev-1/lib/python3.6/site-packages/django/db/models/query.py", line 53, in __iter__
results = compiler.execute_sql(chunked_fetch=self.chunked_fetch)
File "/Users/amit/koolanch-dev/dev-1/lib/python3.6/site-packages/django/db/models/sql/compiler.py", line 894, in execute_sql
raise original_exception
File "/Users/amit/koolanch-dev/dev-1/lib/python3.6/site-packages/django/db/models/sql/compiler.py", line 884, in execute_sql
cursor.execute(sql, params)
File "/Users/amit/koolanch-dev/dev-1/lib/python3.6/site-packages/django/db/backends/utils.py", line 80, in execute
return super(CursorDebugWrapper, self).execute(sql, params)
File "/Users/amit/koolanch-dev/dev-1/lib/python3.6/site-packages/django/db/backends/utils.py", line 65, in execute
return self.cursor.execute(sql, params)
File "/Users/amit/koolanch-dev/dev-1/lib/python3.6/site-packages/django/db/utils.py", line 94, in __exit__
six.reraise(dj_exc_type, dj_exc_value, traceback)
File "/Users/amit/koolanch-dev/dev-1/lib/python3.6/site-packages/django/utils/six.py", line 685, in reraise
raise value.with_traceback(tb)
File "/Users/amit/koolanch-dev/dev-1/lib/python3.6/site-packages/django/db/backends/utils.py", line 65, in execute
return self.cursor.execute(sql, params)
django.db.utils.ProgrammingError: relation "django_celery_beat_periodictask" does not exist
LINE 1: ...ango_celery_beat_periodictask"."description" FROM "django_ce...
^
[2018-05-04 18:20:23,219: WARNING/Beat] Process Beat:
[2018-05-04 18:20:23,220: WARNING/Beat] Traceback (most recent call last):
[2018-05-04 18:20:23,220: WARNING/Beat] File "/Users/amit/koolanch-dev/dev-1/lib/python3.6/site-packages/kombu/utils/objects.py", line 42, in __get__
return obj.__dict__[self.__name__]
[2018-05-04 18:20:23,220: WARNING/Beat] KeyError: 'scheduler'
[2018-05-04 18:20:23,220: WARNING/Beat] During handling of the above exception, another exception occurred:
[2018-05-04 18:20:23,220: WARNING/Beat] Traceback (most recent call last):
[2018-05-04 18:20:23,221: WARNING/Beat] File "/Users/amit/koolanch-dev/dev-1/lib/python3.6/site-packages/django/db/backends/utils.py", line 65, in execute
return self.cursor.execute(sql, params)
[2018-05-04 18:20:23,221: WARNING/Beat] psycopg2.ProgrammingError: relation "django_celery_beat_periodictask" does not exist
LINE 1: ...ango_celery_beat_periodictask"."description" FROM "django_ce...
^
[2018-05-04 18:20:23,221: WARNING/Beat] The above exception was the direct cause of the following exception:
[2018-05-04 18:20:23,221: WARNING/Beat] Traceback (most recent call last):
[2018-05-04 18:20:23,221: WARNING/Beat] File "/Users/amit/koolanch-dev/dev-1/lib/python3.6/site-packages/billiard/process.py", line 306, in _bootstrap
self.run()
[2018-05-04 18:20:23,222: WARNING/Beat] File "/Users/amit/koolanch-dev/dev-1/lib/python3.6/site-packages/celery/beat.py", line 613, in run
self.service.start(embedded_process=True)
[2018-05-04 18:20:23,222: WARNING/Beat] File "/Users/amit/koolanch-dev/dev-1/lib/python3.6/site-packages/celery/beat.py", line 528, in start
humanize_seconds(self.scheduler.max_interval))
[2018-05-04 18:20:23,222: WARNING/Beat] File "/Users/amit/koolanch-dev/dev-1/lib/python3.6/site-packages/kombu/utils/objects.py", line 44, in __get__
value = obj.__dict__[self.__name__] = self.__get(obj)
[2018-05-04 18:20:23,222: WARNING/Beat] File "/Users/amit/koolanch-dev/dev-1/lib/python3.6/site-packages/celery/beat.py", line 572, in scheduler
return self.get_scheduler()
[2018-05-04 18:20:23,222: WARNING/Beat] File "/Users/amit/koolanch-dev/dev-1/lib/python3.6/site-packages/celery/beat.py", line 567, in get_scheduler
lazy=lazy,
[2018-05-04 18:20:23,222: WARNING/Beat] File "/Users/amit/koolanch-dev/dev-1/lib/python3.6/site-packages/django_celery_beat/schedulers.py", line 181, in __init__
Scheduler.__init__(self, *args, **kwargs)
[2018-05-04 18:20:23,223: WARNING/Beat] File "/Users/amit/koolanch-dev/dev-1/lib/python3.6/site-packages/celery/beat.py", line 204, in __init__
self.setup_schedule()
[2018-05-04 18:20:23,223: WARNING/Beat] File "/Users/amit/koolanch-dev/dev-1/lib/python3.6/site-packages/django_celery_beat/schedulers.py", line 189, in setup_schedule
self.install_default_entries(self.schedule)
[2018-05-04 18:20:23,223: WARNING/Beat] File "/Users/amit/thoughtanalytik/koolanch/src/koolanch/beat_schedulers.py", line 27, in schedule
self._schedule = self.all_as_schedule()
[2018-05-04 18:20:23,223: WARNING/Beat] File "/Users/amit/koolanch-dev/dev-1/lib/python3.6/site-packages/django_celery_beat/schedulers.py", line 195, in all_as_schedule
for model in self.Model.objects.enabled():
[2018-05-04 18:20:23,223: WARNING/Beat] File "/Users/amit/koolanch-dev/dev-1/lib/python3.6/site-packages/django/db/models/query.py", line 250, in __iter__
self._fetch_all()
[2018-05-04 18:20:23,224: WARNING/Beat] File "/Users/amit/koolanch-dev/dev-1/lib/python3.6/site-packages/django/db/models/query.py", line 1118, in _fetch_all
self._result_cache = list(self._iterable_class(self))
[2018-05-04 18:20:23,224: WARNING/Beat] File "/Users/amit/koolanch-dev/dev-1/lib/python3.6/site-packages/django/db/models/query.py", line 53, in __iter__
results = compiler.execute_sql(chunked_fetch=self.chunked_fetch)
[2018-05-04 18:20:23,224: WARNING/Beat] File "/Users/amit/koolanch-dev/dev-1/lib/python3.6/site-packages/django/db/models/sql/compiler.py", line 894, in execute_sql
raise original_exception
[2018-05-04 18:20:23,224: WARNING/Beat] File "/Users/amit/koolanch-dev/dev-1/lib/python3.6/site-packages/django/db/models/sql/compiler.py", line 884, in execute_sql
cursor.execute(sql, params)
[2018-05-04 18:20:23,224: WARNING/Beat] File "/Users/amit/koolanch-dev/dev-1/lib/python3.6/site-packages/django/db/backends/utils.py", line 80, in execute
return super(CursorDebugWrapper, self).execute(sql, params)
[2018-05-04 18:20:23,224: WARNING/Beat] File "/Users/amit/koolanch-dev/dev-1/lib/python3.6/site-packages/django/db/backends/utils.py", line 65, in execute
return self.cursor.execute(sql, params)
[2018-05-04 18:20:23,224: WARNING/Beat] File "/Users/amit/koolanch-dev/dev-1/lib/python3.6/site-packages/django/db/utils.py", line 94, in __exit__
six.reraise(dj_exc_type, dj_exc_value, traceback)
[2018-05-04 18:20:23,225: WARNING/Beat] File "/Users/amit/koolanch-dev/dev-1/lib/python3.6/site-packages/django/utils/six.py", line 685, in reraise
raise value.with_traceback(tb)
[2018-05-04 18:20:23,225: WARNING/Beat] File "/Users/amit/koolanch-dev/dev-1/lib/python3.6/site-packages/django/db/backends/utils.py", line 65, in execute
return self.cursor.execute(sql, params)
[2018-05-04 18:20:23,225: WARNING/Beat] django.db.utils.ProgrammingError: relation "django_celery_beat_periodictask" does not exist
LINE 1: ...ango_celery_beat_periodictask"."description" FROM "django_ce...
Thanks in advance.
Celery beat (using django_celery_beat) by default connects to the public schema and monitors the django_celery_beat_periodictask table for periodic tasks. Hence these tables must exist in the public schema, so the django_celery_beat app should be part of the public schema and all periodic tasks must be created in the public schema.
import json

from django_celery_beat.models import IntervalSchedule, PeriodicTask
from tenant_schemas.utils import schema_context

with schema_context('public'):
    schedule, created = IntervalSchedule.objects.get_or_create(
        every=execution_interval,
        period=IntervalSchedule.MINUTES)
    pt_obj, created = PeriodicTask.objects.get_or_create(
        name=name,
        task='tasks.task_name')
    pt_obj.interval = schedule
    pt_obj.enabled = False
    pt_obj.args = json.dumps([])
    pt_obj.kwargs = json.dumps({})
    pt_obj.save()
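With django-tenant-schemas, keeping django_celery_beat in the public schema amounts to listing it under SHARED_APPS rather than TENANT_APPS, so its tables are created only in the public schema. A minimal sketch (the surrounding entries are placeholders):

# settings.py (sketch; other apps are placeholders)
SHARED_APPS = (
    'tenant_schemas',
    'django_celery_beat',  # periodic-task tables live in the public schema
    # ... the rest of the shared apps ...
)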
There is a package named tenant-schemas-celery, which is recommended by django-tenant-schemas itself.

Unhandled exception in thread started by <function wrapper at 0x7f8c05fc8a28>

nandu@nandu-Lenovo-ideapad-320-15IKB:~/Django/newproject$ python manage.py runserver
Performing system checks...
System check identified no issues (0 silenced).
Unhandled exception in thread started by <function wrapper at 0x7f8c05fc8a28>
Traceback (most recent call last):
File "/usr/lib/python2.7/dist-packages/django/utils/autoreload.py", line 229, in wrapper
fn(*args, **kwargs)
File "/usr/lib/python2.7/dist-packages/django/core/management/commands/runserver.py", line 116, in inner_run
self.check_migrations()
File "/usr/lib/python2.7/dist-packages/django/core/management/commands/runserver.py", line 168, in check_migrations
executor = MigrationExecutor(connections[DEFAULT_DB_ALIAS])
File "/usr/lib/python2.7/dist-packages/django/db/migrations/executor.py", line 19, in __init__
self.loader = MigrationLoader(self.connection)
File "/usr/lib/python2.7/dist-packages/django/db/migrations/loader.py", line 47, in __init__
self.build_graph()
File "/usr/lib/python2.7/dist-packages/django/db/migrations/loader.py", line 191, in build_graph
self.applied_migrations = recorder.applied_migrations()
File "/usr/lib/python2.7/dist-packages/django/db/migrations/recorder.py", line 59, in applied_migrations
self.ensure_schema()
File "/usr/lib/python2.7/dist-packages/django/db/migrations/recorder.py", line 49, in ensure_schema
if self.Migration._meta.db_table in self.connection.introspection.table_names(self.connection.cursor()):
File "/usr/lib/python2.7/dist-packages/django/db/backends/base/base.py", line 162, in cursor
cursor = self.make_debug_cursor(self._cursor())
File "/usr/lib/python2.7/dist-packages/django/db/backends/base/base.py", line 135, in _cursor
self.ensure_connection()
File "/usr/lib/python2.7/dist-packages/django/db/backends/base/base.py", line 130, in ensure_connection
self.connect()
File "/usr/lib/python2.7/dist-packages/django/db/utils.py", line 98, in __exit__
six.reraise(dj_exc_type, dj_exc_value, traceback)
File "/usr/lib/python2.7/dist-packages/django/db/backends/base/base.py", line 130, in ensure_connection
self.connect()
File "/usr/lib/python2.7/dist-packages/django/db/backends/base/base.py", line 119, in connect
self.connection = self.get_new_connection(conn_params)
File "/usr/lib/python2.7/dist-packages/django/db/backends/sqlite3/base.py", line 204, in get_new_connection
conn = Database.connect(**conn_params)
django.db.utils.OperationalError: unable to open database file