I am customizing an AppFuse 3.0.0 "Web Service Only" artifact. I hit the following stack trace when trying to add a checked exception to my web service:
[ERROR] Failed to execute goal org.codehaus.enunciate:maven-enunciate-cxf-plugin:1.28:assemble (default) on project ibnInq: Problem assembling the enunciate app. local part cannot be "null" when creating a QName -> [Help 1]
org.apache.maven.lifecycle.LifecycleExecutionException: Failed to execute goal org.codehaus.enunciate:maven-enunciate-cxf-plugin:1.28:assemble (default) on project ibnInq: Problem assembling the enunciate app.
Problem assembling the enunciate app.
at org.apache.maven.lifecycle.internal.MojoExecutor.execute(MojoExecutor.java:216)
at org.apache.maven.lifecycle.internal.MojoExecutor.execute(MojoExecutor.java:153)
at org.apache.maven.lifecycle.internal.MojoExecutor.execute(MojoExecutor.java:145)
at org.apache.maven.lifecycle.internal.MojoExecutor.executeForkedExecutions(MojoExecutor.java:364)
at org.apache.maven.lifecycle.internal.MojoExecutor.execute(MojoExecutor.java:198)
at org.apache.maven.lifecycle.internal.MojoExecutor.execute(MojoExecutor.java:153)
at org.apache.maven.lifecycle.internal.MojoExecutor.execute(MojoExecutor.java:145)
at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject(LifecycleModuleBuilder.java:116)
at org.apache.maven.lifecycle.internal.LifecycleModuleBuilder.buildProject(LifecycleModuleBuilder.java:80)
at org.apache.maven.lifecycle.internal.builder.singlethreaded.SingleThreadedBuilder.build(SingleThreadedBuilder.java:51)
at org.apache.maven.lifecycle.internal.LifecycleStarter.execute(LifecycleStarter.java:120)
at org.apache.maven.DefaultMaven.doExecute(DefaultMaven.java:355)
at org.apache.maven.DefaultMaven.execute(DefaultMaven.java:155)
at org.apache.maven.cli.MavenCli.execute(MavenCli.java:584)
at org.apache.maven.cli.MavenCli.doMain(MavenCli.java:216)
at org.apache.maven.cli.MavenCli.main(MavenCli.java:160)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.codehaus.plexus.classworlds.launcher.Launcher.launchEnhanced(Launcher.java:289)
at org.codehaus.plexus.classworlds.launcher.Launcher.launch(Launcher.java:229)
at org.codehaus.plexus.classworlds.launcher.Launcher.mainWithExitCode(Launcher.java:415)
at org.codehaus.plexus.classworlds.launcher.Launcher.main(Launcher.java:356)
Caused by: org.apache.maven.plugin.MojoExecutionException: Problem assembling the enunciate app.
at org.codehaus.enunciate.AssembleMojo.execute(AssembleMojo.java:75)
at org.apache.maven.plugin.DefaultBuildPluginManager.executeMojo(DefaultBuildPluginManager.java:132)
at org.apache.maven.lifecycle.internal.MojoExecutor.execute(MojoExecutor.java:208)
... 23 more
Caused by: java.lang.IllegalArgumentException: local part cannot be "null" when creating a QName
at javax.xml.namespace.QName.<init>(QName.java:244)
at javax.xml.namespace.QName.<init>(QName.java:188)
at org.codehaus.enunciate.contract.jaxws.WebFault.getParticleQName(WebFault.java:280)
at org.codehaus.enunciate.contract.jaxws.WebMethod.getReferencedNamespaces(WebMethod.java:232)
at org.codehaus.enunciate.contract.jaxws.EndpointInterface.getReferencedNamespaces(EndpointInterface.java:229)
at org.codehaus.enunciate.config.WsdlInfo.getImportedNamespaces(WsdlInfo.java:132)
at org.codehaus.enunciate.modules.xml.WsdlInfoModel.get(WsdlInfoModel.java:50)
at freemarker.core.Dot._getAsTemplateModel(Dot.java:76)
at freemarker.core.Expression.getAsTemplateModel(Expression.java:89)
at freemarker.core.IteratorBlock.accept(IteratorBlock.java:94)
at freemarker.core.Environment.visit(Environment.java:221)
at freemarker.core.MixedContent.accept(MixedContent.java:92)
at freemarker.core.Environment.visit(Environment.java:221)
at freemarker.core.Macro$Context.runMacro(Macro.java:172)
at freemarker.core.Environment.visit(Environment.java:614)
at freemarker.core.UnifiedCall.accept(UnifiedCall.java:106)
at freemarker.core.Environment.visit(Environment.java:221)
at freemarker.core.Environment.visit(Environment.java:310)
at freemarker.core.CompressedBlock.accept(CompressedBlock.java:73)
at freemarker.core.Environment.visit(Environment.java:221)
at freemarker.core.Environment.visit(Environment.java:310)
at freemarker.core.UnifiedCall.accept(UnifiedCall.java:130)
at freemarker.core.Environment.visit(Environment.java:221)
at freemarker.core.ConditionalBlock.accept(ConditionalBlock.java:79)
at freemarker.core.Environment.visit(Environment.java:221)
at freemarker.core.Environment.visit(Environment.java:310)
at freemarker.core.UnifiedCall.accept(UnifiedCall.java:130)
at freemarker.core.Environment.visit(Environment.java:221)
at freemarker.core.MixedContent.accept(MixedContent.java:92)
at freemarker.core.Environment.visit(Environment.java:221)
at freemarker.core.Environment.process(Environment.java:199)
at freemarker.template.Template.process(Template.java:259)
at org.codehaus.enunciate.modules.FreemarkerDeploymentModule.processTemplate(FreemarkerDeploymentModule.java:106)
at org.codehaus.enunciate.modules.FreemarkerDeploymentModule.processTemplate(FreemarkerDeploymentModule.java:85)
at org.codehaus.enunciate.modules.FreemarkerDeploymentModule.processTemplate(FreemarkerDeploymentModule.java:70)
at org.codehaus.enunciate.modules.xml.XMLDeploymentModule.doFreemarkerGenerate(XMLDeploymentModule.java:340)
at org.codehaus.enunciate.modules.FreemarkerDeploymentModule.doGenerate(FreemarkerDeploymentModule.java:51)
at org.codehaus.enunciate.modules.BasicDeploymentModule.step(BasicDeploymentModule.java:107)
at org.codehaus.enunciate.apt.EnunciateAnnotationProcessor.process(EnunciateAnnotationProcessor.java:128)
at com.sun.mirror.apt.AnnotationProcessors$CompositeAnnotationProcessor.process(AnnotationProcessors.java:84)
at com.sun.tools.apt.comp.Apt.main(Apt.java:480)
at com.sun.tools.apt.main.AptJavaCompiler.compile(AptJavaCompiler.java:270)
at com.sun.tools.apt.main.Main.compile(Main.java:1127)
at com.sun.tools.apt.main.Main.compile(Main.java:989)
at com.sun.tools.apt.Main.processing(Main.java:113)
at com.sun.tools.apt.Main.process(Main.java:103)
at com.sun.tools.apt.Main.process(Main.java:85)
at org.codehaus.enunciate.main.Enunciate.invokeApt(Enunciate.java:817)
at org.codehaus.enunciate.main.Enunciate.doGenerate(Enunciate.java:401)
at org.codehaus.enunciate.ConfigMojo$MavenSpecificEnunciate.doGenerate(ConfigMojo.java:670)
at org.codehaus.enunciate.main.Enunciate$Stepper.step(Enunciate.java:1799)
at org.codehaus.enunciate.main.Enunciate$Stepper.stepTo(Enunciate.java:1831)
at org.codehaus.enunciate.AssembleMojo.execute(AssembleMojo.java:71)
... 25 more
My Enunciate version is 1.28, and I have enabled the CXF module in enunciate.xml:
<cxf disabled="false"/>
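For context, that element lives in the modules section of a standard Enunciate 1.x enunciate.xml; a minimal sketch (assuming the default layout, not my exact file):
<enunciate>
  <modules>
    <!-- sketch: only the CXF module toggle is relevant here -->
    <cxf disabled="false"/>
  </modules>
</enunciate>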
I followed the JAX-WS spec for throwing the exception.
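For reference, the exception uses the standard JAX-WS @WebFault/fault-bean pattern, roughly like this (a sketch with illustrative names, not my exact class):
import javax.xml.ws.WebFault;

// Hypothetical exception name; the fault bean is the FaultBean shown further down.
@WebFault(name = "ProcessingFault")
public class ProcessingException extends Exception {

    private final FaultBean faultInfo;

    public ProcessingException(String message, FaultBean faultInfo) {
        super(message);
        this.faultInfo = faultInfo;
    }

    // JAX-WS locates the fault bean through this accessor.
    public FaultBean getFaultInfo() {
        return faultInfo;
    }
}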
Any help is appreciated.
Finally, I fixed my problem with the following workaround: adding @XmlRootElement to my FaultBean. Apparently Enunciate cannot derive the fault's element QName (WebFault.getParticleQName in the trace above) unless the fault bean declares a root element.
@XmlRootElement
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "FaultBean", propOrder = {"errorDescription", "errorCode"})
public class FaultBean implements Serializable {

    @XmlElement(required = true, nillable = true)
    protected String errorDescription;

    @XmlElement(required = true, nillable = true)
    protected String errorCode;

    public FaultBean() {
    }

    public String getErrorDescription() {
        return this.errorDescription;
    }

    public void setErrorDescription(String errorDescription) {
        this.errorDescription = errorDescription;
    }

    public String getErrorCode() {
        return this.errorCode;
    }

    public void setErrorCode(String errorCode) {
        this.errorCode = errorCode;
    }
}
I've written a test class for a Kafka Streams application as per https://kafka.apache.org/24/documentation/streams/developer-guide/testing.html, the code for which is:
import com.EventSerde;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.*;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.util.Properties;
import static org.junit.Assert.assertEquals;
public class KafkaStreamsConfigTest {
private TopologyTestDriver testDriver;
private TestInputTopic<String, Object> inputTopic;
private TestOutputTopic<String, Object> outputTopic;
private Serde<String> stringSerde = new Serdes.StringSerde();
private EventSerde eventSerde= new EventSerde();
private String key="test";
private Object value = "some value";
private Object expected_value = "real value";
String kafkaEventSourceTopic = "raw_events";
String kafkaEventSinkTopic = "processed_events";
String kafkaCacheSinkTopic = "cache_objects";
String applicationId = "my-app";
String test_dummy = "dummy:1234";
@Before
public void setup() {
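// Rebuild the application's topology by hand: one source feeding ProcessRouter,
// which fans out to five processors wired into the two sinks below.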
Topology topology = new Topology();
topology.addSource(kafkaEventSourceTopic, kafkaEventSourceTopic);
topology.addProcessor(ProcessRouter.class.getSimpleName(), ProcessRouter::new, kafkaEventSourceTopic);
topology.addProcessor(WorkforceVisit.class.getSimpleName(), WorkforceVisit::new
, ProcessRouter.class.getSimpleName());
topology.addProcessor(DefaultProcessor.class.getSimpleName(), DefaultProcessor::new
, ProcessRouter.class.getSimpleName());
topology.addProcessor(CacheWorkforceShift.class.getSimpleName(), CacheWorkforceShift::new
, ProcessRouter.class.getSimpleName());
topology.addProcessor(DigitalcareShiftassisstantTracking.class.getSimpleName(), DigitalcareShiftassisstantTracking::new
, ProcessRouter.class.getSimpleName());
topology.addProcessor(WorkforceLocationUpdate.class.getSimpleName(), WorkforceLocationUpdate::new
, ProcessRouter.class.getSimpleName());
topology.addSink(kafkaEventSinkTopic, kafkaEventSinkTopic
, WorkforceVisit.class.getSimpleName(), DefaultProcessor.class.getSimpleName()
, CacheWorkforceShift.class.getSimpleName(), DigitalcareShiftassisstantTracking.class.getSimpleName()
, WorkforceLocationUpdate.class.getSimpleName());
topology.addSink(kafkaCacheSinkTopic, kafkaCacheSinkTopic
, WorkforceVisit.class.getSimpleName()
, CacheWorkforceShift.class.getSimpleName(), DigitalcareShiftassisstantTracking.class.getSimpleName()
, WorkforceLocationUpdate.class.getSimpleName());
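// Minimal Streams config for the test driver; bootstrap.servers is a dummy value
// because TopologyTestDriver never connects to a real broker.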
Properties properties = new Properties();
properties.put(StreamsConfig.APPLICATION_ID_CONFIG, applicationId);
properties.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, test_dummy);
properties.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
properties.put(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, EventSerde.class.getName());
testDriver = new TopologyTestDriver(topology, properties);
//setup test topics
inputTopic = testDriver.createInputTopic(kafkaEventSourceTopic, stringSerde.serializer(), eventSerde.serializer());
outputTopic = testDriver.createOutputTopic(kafkaEventSinkTopic, stringSerde.deserializer(), eventSerde.deserializer());
}
@After
public void tearDown() {
testDriver.close();
}
@Test
public void outputEqualsTrue() {
inputTopic.pipeInput(key, value);
Object b = outputTopic.readValue();
System.out.println(b.toString());
assertEquals(expected_value, b);
}
}
where I used my EventSerde class to serialize and deserialize the value.
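EventSerde itself isn't shown here; as a rough stand-in (an assumption, not the real com.EventSerde), a serde that round-trips any Serializable value via Java serialization could look like this:
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import org.apache.kafka.common.serialization.Deserializer;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serializer;

// Sketch of a possible EventSerde: Java-serialization round trip for Serializable values.
public class EventSerde implements Serde<Object> {

    @Override
    public Serializer<Object> serializer() {
        return (topic, data) -> {
            try (ByteArrayOutputStream bos = new ByteArrayOutputStream();
                 ObjectOutputStream oos = new ObjectOutputStream(bos)) {
                oos.writeObject(data);
                oos.flush();
                return bos.toByteArray();
            } catch (Exception e) {
                throw new RuntimeException("serialization failed", e);
            }
        };
    }

    @Override
    public Deserializer<Object> deserializer() {
        return (topic, bytes) -> {
            if (bytes == null) {
                return null; // tombstones stay null
            }
            try (ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(bytes))) {
                return ois.readObject();
            } catch (Exception e) {
                throw new RuntimeException("deserialization failed", e);
            }
        };
    }
}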
When I run this code, it gives the error java.util.NoSuchElementException: Uninitialized topic: processed_events with the following stack trace:
java.util.NoSuchElementException: Uninitialized topic: processed_events
at org.apache.kafka.streams.TopologyTestDriver.readRecord(TopologyTestDriver.java:715)
at org.apache.kafka.streams.TestOutputTopic.readRecord(TestOutputTopic.java:100)
at org.apache.kafka.streams.TestOutputTopic.readValue(TestOutputTopic.java:80)
at com.uhx.platform.eventprocessor.config.KafkaStreamsConfigTest.outputEqualsTrue(KafkaStreamsConfigTest.java:111)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.base/java.lang.reflect.Method.invoke(Method.java:566)
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56)
at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26)
at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27)
at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306)
at org.junit.runners.BlockJUnit4ClassRunner$1.evaluate(BlockJUnit4ClassRunner.java:100)
at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:366)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:103)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:63)
at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331)
at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79)
at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329)
at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66)
at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293)
at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306)
at org.junit.runners.ParentRunner.run(ParentRunner.java:413)
at org.junit.runner.JUnitCore.run(JUnitCore.java:137)
at com.intellij.junit4.JUnit4IdeaTestRunner.startRunnerWithArgs(JUnit4IdeaTestRunner.java:68)
at com.intellij.rt.junit.IdeaTestRunner$Repeater.startRunnerWithArgs(IdeaTestRunner.java:33)
at com.intellij.rt.junit.JUnitStarter.prepareStreamsAndStart(JUnitStarter.java:230)
at com.intellij.rt.junit.JUnitStarter.main(JUnitStarter.java:58)
As you can see, I have initialized both input and output topics.
I have also debugged the code, and the error occurs when I read the value from the output topic:
outputTopic.readValue();
I don't understand what else I should do to initialize the outputTopic. Can anyone help me with this problem?
I am using Apache kafka-streams-test-utils 2.4.0 and kafka-streams 2.4.0:
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-streams</artifactId>
<version>2.4.0</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.kafka/kafka-clients -->
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
<version>2.4.0</version>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-streams-test-utils</artifactId>
<version>2.4.0</version>
<scope>test</scope>
</dependency>
To avoid/overcome this exception, check that your output topic is not empty before trying to read from it. TopologyTestDriver reports "Uninitialized topic" when nothing was ever produced to that topic, so an explicit emptiness assertion (assertFalse statically imported from org.junit.Assert) fails with a much clearer message:
@Test
public void outputEqualsTrue() {
inputTopic.pipeInput(key, value);
assertFalse(outputTopic.isEmpty());
Object b = outputTopic.readValue();
System.out.println(b.toString());
assertEquals(expected_value, b);
}
I'm using Spring scheduling to run a nightly batch job; it should invoke a SOAP web service and nothing else.
The program works fine on my local server, but not in PROD.
The scheduler:
@Configuration
@EnableScheduling
public class ActivationScheduler {
private static final Logger LOGGER = LoggerFactory.getLogger(ActivationScheduler.class);
@Autowired
private ActivationService activationService;
/**
 * A cron job that runs automatically every night at 04:00.
 */
@Scheduled(cron = "0 20 11 * * *")
public void execute() {
try {
activationService.relanceAll();
} catch (Exception e) {
LOGGER.error(Constants.EXCEPTION_LABEL, e);
}
}
}
Exception:
2017-11-10 11:20:00 ERROR TaskUtils$LoggingErrorHandler:95 - Unexpected error occurred in scheduled task.
java.lang.NoClassDefFoundError: org/bouncycastle/jce/provider/JCEBlockCipher$AEADGenericBlockCipher
at org.bouncycastle.jce.provider.JCEBlockCipher.engineSetMode(Unknown Source)
at javax.crypto.Cipher$Transform.setModePadding(Cipher.java:374)
at javax.crypto.Cipher.getInstance(Cipher.java:533)
at sun.security.ssl.JsseJce.getCipher(JsseJce.java:229)
at sun.security.ssl.CipherBox.<init>(CipherBox.java:179)
at sun.security.ssl.CipherBox.newCipherBox(CipherBox.java:263)
at sun.security.ssl.CipherSuite$BulkCipher.newCipher(CipherSuite.java:505)
at sun.security.ssl.CipherSuite$BulkCipher.isAvailable(CipherSuite.java:572)
at sun.security.ssl.CipherSuite$BulkCipher.isAvailable(CipherSuite.java:527)
at sun.security.ssl.CipherSuite.isAvailable(CipherSuite.java:194)
at sun.security.ssl.SSLContextImpl.getApplicableCipherSuiteList(SSLContextImpl.java:346)
at sun.security.ssl.SSLContextImpl.getDefaultCipherSuiteList(SSLContextImpl.java:304)
at sun.security.ssl.SSLSocketImpl.init(SSLSocketImpl.java:626)
at sun.security.ssl.SSLSocketImpl.<init>(SSLSocketImpl.java:537)
at sun.security.ssl.SSLSocketFactoryImpl.createSocket(SSLSocketFactoryImpl.java:72)
at sun.net.www.protocol.https.HttpsClient.createSocket(HttpsClient.java:405)
at sun.net.NetworkClient.doConnect(NetworkClient.java:162)
at sun.net.www.http.HttpClient.openServer(HttpClient.java:432)
at sun.net.www.http.HttpClient.openServer(HttpClient.java:527)
at sun.net.www.protocol.https.HttpsClient.<init>(HttpsClient.java:264)
at sun.net.www.protocol.https.HttpsClient.New(HttpsClient.java:367)
at sun.net.www.protocol.https.AbstractDelegateHttpsURLConnection.getNewHttpClient(AbstractDelegateHttpsURLConnection.java:191)
at sun.net.www.protocol.http.HttpURLConnection.plainConnect0(HttpURLConnection.java:1105)
at sun.net.www.protocol.http.HttpURLConnection.plainConnect(HttpURLConnection.java:999)
at sun.net.www.protocol.https.AbstractDelegateHttpsURLConnection.connect(AbstractDelegateHttpsURLConnection.java:177)
at sun.net.www.protocol.http.HttpURLConnection.getOutputStream0(HttpURLConnection.java:1283)
at sun.net.www.protocol.http.HttpURLConnection.getOutputStream(HttpURLConnection.java:1258)
at sun.net.www.protocol.https.HttpsURLConnectionImpl.getOutputStream(HttpsURLConnectionImpl.java:250)
at org.springframework.ws.transport.http.HttpUrlConnection.getRequestOutputStream(HttpUrlConnection.java:89)
at org.springframework.ws.transport.AbstractSenderConnection$RequestTransportOutputStream.createOutputStream(AbstractSenderConnection.java:87)
at org.springframework.ws.transport.TransportOutputStream.getOutputStream(TransportOutputStream.java:41)
at org.springframework.ws.transport.TransportOutputStream.write(TransportOutputStream.java:64)
at com.sun.xml.internal.messaging.saaj.soap.MessageImpl.writeTo(MessageImpl.java:1314)
at org.springframework.ws.soap.saaj.SaajSoapMessage.writeTo(SaajSoapMessage.java:275)
at org.springframework.ws.transport.AbstractWebServiceConnection.send(AbstractWebServiceConnection.java:46)
at org.springframework.ws.client.core.WebServiceTemplate.sendRequest(WebServiceTemplate.java:658)
at org.springframework.ws.client.core.WebServiceTemplate.doSendAndReceive(WebServiceTemplate.java:606)
at org.springframework.ws.client.core.WebServiceTemplate.sendAndReceive(WebServiceTemplate.java:555)
at org.springframework.ws.client.core.WebServiceTemplate.marshalSendAndReceive(WebServiceTemplate.java:390)
at org.springframework.ws.client.core.WebServiceTemplate.marshalSendAndReceive(WebServiceTemplate.java:383)
at org.springframework.ws.client.core.WebServiceTemplate.marshalSendAndReceive(WebServiceTemplate.java:373)
I googled the exception and found some people talking about BouncyCastle; I added the latest version, but it didn't solve my problem and I got the same exception.
I'm also using DynamicReports 5.0.0 in the project, and I excluded its BouncyCastle artifacts (bcprov-jdk14, bcmail-jdk14, bctsp-jdk14), along the lines of the sketch below.
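The exclusions look roughly like this (a sketch: the dynamicreports coordinates and the BouncyCastle groupIds are from memory and vary between bouncycastle and org.bouncycastle across versions, so verify them with mvn dependency:tree):
<dependency>
    <groupId>net.sourceforge.dynamicreports</groupId>
    <artifactId>dynamicreports-core</artifactId>
    <version>5.0.0</version>
    <exclusions>
        <!-- sketch: groupIds are assumptions; check mvn dependency:tree -->
        <exclusion>
            <groupId>bouncycastle</groupId>
            <artifactId>bcprov-jdk14</artifactId>
        </exclusion>
        <exclusion>
            <groupId>bouncycastle</groupId>
            <artifactId>bcmail-jdk14</artifactId>
        </exclusion>
        <exclusion>
            <groupId>org.bouncycastle</groupId>
            <artifactId>bctsp-jdk14</artifactId>
        </exclusion>
    </exclusions>
</dependency>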
One last detail that may be useful: the invoked web service is secured with SSL.
Thanks,
I have been using the org.elasticsearch.test.ElasticsearchIntegrationTest class for testing Elasticsearch functionality.
I use the code below to import data into Elasticsearch and to search that data:
@Test
public void searchData() throws Exception{
logger.info("searchData : searching data from Elasticsearch");
try {
csvImporter.client = client();
csvImporter.importData(); // In the importData method after data is imported we close the client
//In the below line getting Exception and it is inconsistent. Some times works fine
SearchResponse searchResponse = csvImporter.client.prepareSearch().setQuery(matchAllQuery()).execute().actionGet();
for (SearchHit searchHit : searchResponse.getHits()){
logger.info("Source Data >>>>>>>>>>>>>>>>>>>>> " + searchHit.sourceAsString());
}
} finally {
csvImporter.client.close();
}
}
Exception:
org.elasticsearch.client.transport.NoNodeAvailableException: None of the configured nodes are available: []
at __randomizedtesting.SeedInfo.seed([86EB1B4AAE1739A5:99315B8D3D924DA1]:0)
at org.elasticsearch.client.transport.TransportClientNodesService.ensureNodesAreAvailable(TransportClientNodesService.java:305)
at org.elasticsearch.client.transport.TransportClientNodesService.execute(TransportClientNodesService.java:200)
at org.elasticsearch.client.transport.support.InternalTransportIndicesAdminClient.execute(InternalTransportIndicesAdminClient.java:86)
at org.elasticsearch.client.support.AbstractIndicesAdminClient.deleteTemplate(AbstractIndicesAdminClient.java:687)
at org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateRequestBuilder.doExecute(DeleteIndexTemplateRequestBuilder.java:40)
at org.elasticsearch.action.ActionRequestBuilder.execute(ActionRequestBuilder.java:91)
at org.elasticsearch.action.ActionRequestBuilder.execute(ActionRequestBuilder.java:65)
at org.elasticsearch.test.TestCluster.wipeTemplates(TestCluster.java:177)
at org.elasticsearch.test.TestCluster.wipe(TestCluster.java:76)
at org.elasticsearch.test.ElasticsearchIntegrationTest.beforeInternal(ElasticsearchIntegrationTest.java:292)
at org.elasticsearch.test.ElasticsearchIntegrationTest.before(ElasticsearchIntegrationTest.java:1946)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at com.carrotsearch.randomizedtesting.RandomizedRunner.invoke(RandomizedRunner.java:1665)
at com.carrotsearch.randomizedtesting.RandomizedRunner$9.evaluate(RandomizedRunner.java:898)
at com.carrotsearch.randomizedtesting.RandomizedRunner$10.evaluate(RandomizedRunner.java:914)
at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:55)
at org.apache.lucene.util.TestRuleSetupTeardownChained$1.evaluate(TestRuleSetupTeardownChained.java:50)
at org.apache.lucene.util.TestRuleFieldCacheSanity$1.evaluate(TestRuleFieldCacheSanity.java:51)
at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:46)
at com.carrotsearch.randomizedtesting.rules.SystemPropertiesInvariantRule$1.evaluate(SystemPropertiesInvariantRule.java:59)
at org.apache.lucene.util.TestRuleThreadAndTestName$1.evaluate(TestRuleThreadAndTestName.java:49)
at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:65)
at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:48)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:367)
at com.carrotsearch.randomizedtesting.ThreadLeakControl.forkTimeoutingTask(ThreadLeakControl.java:809)
at com.carrotsearch.randomizedtesting.ThreadLeakControl$3.evaluate(ThreadLeakControl.java:460)
at com.carrotsearch.randomizedtesting.RandomizedRunner.runSingleTest(RandomizedRunner.java:873)
at com.carrotsearch.randomizedtesting.RandomizedRunner$5.evaluate(RandomizedRunner.java:775)
at com.carrotsearch.randomizedtesting.RandomizedRunner$6.evaluate(RandomizedRunner.java:809)
at com.carrotsearch.randomizedtesting.RandomizedRunner$7.evaluate(RandomizedRunner.java:820)
at org.apache.lucene.util.AbstractBeforeAfterRule$1.evaluate(AbstractBeforeAfterRule.java:46)
at org.apache.lucene.util.TestRuleStoreClassName$1.evaluate(TestRuleStoreClassName.java:42)
at com.carrotsearch.randomizedtesting.rules.SystemPropertiesInvariantRule$1.evaluate(SystemPropertiesInvariantRule.java:59)
at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:39)
at com.carrotsearch.randomizedtesting.rules.NoShadowingOrOverridesOnMethodsRule$1.evaluate(NoShadowingOrOverridesOnMethodsRule.java:39)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at org.apache.lucene.util.TestRuleAssertionsRequired$1.evaluate(TestRuleAssertionsRequired.java:43)
at org.apache.lucene.util.TestRuleMarkFailure$1.evaluate(TestRuleMarkFailure.java:48)
at org.apache.lucene.util.TestRuleIgnoreAfterMaxFailures$1.evaluate(TestRuleIgnoreAfterMaxFailures.java:65)
at org.apache.lucene.util.TestRuleIgnoreTestSuites$1.evaluate(TestRuleIgnoreTestSuites.java:55)
at com.carrotsearch.randomizedtesting.rules.StatementAdapter.evaluate(StatementAdapter.java:36)
at com.carrotsearch.randomizedtesting.ThreadLeakControl$StatementRunner.run(ThreadLeakControl.java:367)
at java.lang.Thread.run(Thread.java:745)
Since this is a unit test, I have not used explicit configuration settings to create the client.
Is there a way to recreate the client object with the same configuration settings?
This is a really weird issue.
My application.conf looks like:
akka {
event-handlers = ["akka.event.slf4j.Slf4jEventHandler"]
loglevel = "INFO"
loggers = [akka.testkit.TestEventListener]
}
ec {
name = "myApp"
}
tenant {
assetsLocation: /Users
}
monitoring {
tenant.disk.schedule.seconds: 2
tenant.disk.threshold.percent: 80
}
and then I write my test as
@Test
public void testActorForNonExistentLocation() throws Exception {
final Map<String, String> configValues = Collections.singletonMap("tenant.assetsLocation",
"/non/existentLocation");
final Config config = mergeConfig(configValues);
new JavaTestKit(system) {{
assertEquals("system", system.name());
final Props props = TenantMonitorActor.props(config);
final ActorRef supervisor = system.actorOf(props, "supervisor");
new EventFilter<Void>(DiskException.class) {
@Override
protected Void run() {
supervisor.tell(new TenantMonitorMessage(), supervisor);
return null;
}
}.from("akka://system/user/supervisor/diskMonitor").occurrences(1).exec();
}};
}
and everything works. Then I write a new test.monitoring.conf as
include "application.conf"
akka {
loggers = [akka.testkit.TestEventListener]
}
and remove the line loggers = [akka.testkit.TestEventListener] from application.conf.
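For completeness, the test ActorSystem is created from that file roughly like this (a sketch of the wiring; my actual test base class may bootstrap it differently):
// sketch: assumes Typesafe Config's standard classpath lookup of test.monitoring.conf
final Config config = ConfigFactory.load("test.monitoring");
final ActorSystem system = ActorSystem.create("system", config);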
The test then fails with:
[INFO] [05/02/2015 17:20:11.301] [system-akka.actor.default-dispatcher-4] [akka://system/user/supervisor] Scheduling Disk Monitor
[INFO] [05/02/2015 17:20:11.304] [system-akka.actor.default-dispatcher-4] [akka://system/user/supervisor/diskMonitor] disk: /non/existentLocation, total space: 0, usable space: 0
[ERROR] [05/02/2015 17:20:11.309] [system-akka.actor.default-dispatcher-3] [akka://system/user/supervisor/diskMonitor] /non/existentLocation does not exists
com.self.monitoring.tenant.exception.DiskException: /non/existentLocation does not exists
at com.self.monitoring.tenant.DiskMonitorActor.validateAssetsDirectory(DiskMonitorActor.java:55)
at com.self.monitoring.tenant.DiskMonitorActor.onReceive(DiskMonitorActor.java:42)
at akka.actor.UntypedActor$$anonfun$receive$1.applyOrElse(UntypedActor.scala:167)
at akka.actor.Actor$class.aroundReceive(Actor.scala:465)
at akka.actor.UntypedActor.aroundReceive(UntypedActor.scala:97)
at akka.actor.ActorCell.receiveMessage(ActorCell.scala:516)
at akka.actor.ActorCell.invoke(ActorCell.scala:487)
at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:254)
at akka.dispatch.Mailbox.run(Mailbox.scala:221)
at akka.dispatch.Mailbox.exec(Mailbox.scala:231)
at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
[INFO] [05/02/2015 17:20:13.308] [system-akka.actor.default-dispatcher-6] [akka://system/user/supervisor/diskMonitor] Message [com.self.monitoring.tenant.message.DiskMonitorMessage] from Actor[akka://system/deadLetters] to Actor[akka://system/user/supervisor/diskMonitor#935599425] was not delivered. [1] dead letters encountered. This logging can be turned off or adjusted with configuration settings 'akka.log-dead-letters' and 'akka.log-dead-letters-during-shutdown'.
java.lang.AssertionError: Timeout (3000 milliseconds) waiting for 1 messages on ErrorFilter(class com.self.monitoring.tenant.exception.DiskException,Some(akka://system/user/supervisor/diskMonitor),Left(),false)
at akka.testkit.EventFilter.intercept(TestEventListener.scala:104)
at akka.testkit.JavaTestKit$EventFilter.exec(JavaTestKit.java:626)
at com.self.monitoring.tenant.TestTenantMonitorActor$2.<init>(TestTenantMonitorActor.java:83)
at com.self.monitoring.tenant.TestTenantMonitorActor.testActorForNonExistentLocation(TestTenantMonitorActor.java:71)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:50)
at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:47)
at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26)
at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27)
at org.junit.rules.TestWatcher$1.evaluate(TestWatcher.java:55)
at org.junit.rules.RunRules.evaluate(RunRules.java:20)
at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:325)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:78)
at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:57)
at org.junit.runners.ParentRunner$3.run(ParentRunner.java:290)
at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:71)
at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:288)
at org.junit.runners.ParentRunner.access$000(ParentRunner.java:58)
at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:268)
at org.junit.runners.ParentRunner.run(ParentRunner.java:363)
at org.junit.runner.JUnitCore.run(JUnitCore.java:137)
at com.intellij.junit4.JUnit4IdeaTestRunner.startRunnerWithArgs(JUnit4IdeaTestRunner.java:78)
at com.intellij.rt.execution.junit.JUnitStarter.prepareStreamsAndStart(JUnitStarter.java:212)
at com.intellij.rt.execution.junit.JUnitStarter.main(JUnitStarter.java:68)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at com.intellij.rt.execution.application.AppMain.main(AppMain.java:140)
I also checked whether akka.testkit.TestEventListener was missing from the merged config, but it's there:
@Test
public void testActorForNonExistentLocation() throws Exception {
final Map<String, String> configValues = Collections.singletonMap("tenant.assetsLocation",
"/non/existentLocation");
final Config config = mergeConfig(configValues);
System.out.println(config.getList("akka.loggers"));
.....
and I see that it's present:
SimpleConfigList(["akka.testkit.TestEventListener"])
[INFO] [05/02/2015 17:22:09.703] [system-akka.actor.default-dispatcher-4] [akka://system/user/supervisor] Scheduling Disk Monitor
[INFO] [05/02/2015 17:22:09.705] [system-akka.actor.default-dispatcher-4] [akka://system/user/supervisor/diskMonitor] disk: /non/existentLocation, total space: 0, usable space: 0
[ERROR] [05/02/2015 17:22:09.709] [system-akka.actor.default-dispatcher-3] [akka://system/user/supervisor/diskMonitor] /non/existentLocation does not exists
What is the issue here?
I'm trying to implement a custom Pig storer based on org.apache.pig.builtin.PigStorage. In my storer I want to compute the real HDFS location from the string passed to the INTO clause together with other job properties.
-- pig
STORE data INTO 'MyObject' using ...
I extend PigStorage and override the setStoreLocation method, like this:
@Override
public void setStoreLocation(String location, Job job) throws IOException {
// location is 'MyObject' here
// my manipulations on the location
String newLocation = basePath + "/" + someVar + "/" + location;
super.setStoreLocation(newLocation, job);
}
It works and writes the file to the new location (/basePath/someVar/MyObject), but I'm getting the following message in the log. How can I avoid it?
java.io.FileNotFoundException: File hdfs://myMachine:8020/user/hdfs/MyObject does not exist.
at org.apache.hadoop.hdfs.DistributedFileSystem.listStatusInternal(DistributedFileSystem.java:654)
at org.apache.hadoop.hdfs.DistributedFileSystem.access$600(DistributedFileSystem.java:102)
at org.apache.hadoop.hdfs.DistributedFileSystem$14.doCall(DistributedFileSystem.java:712)
at org.apache.hadoop.hdfs.DistributedFileSystem$14.doCall(DistributedFileSystem.java:708)
at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
at org.apache.hadoop.hdfs.DistributedFileSystem.listStatus(DistributedFileSystem.java:708)
at org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.FileBasedOutputSizeReader.getOutputSize(FileBasedOutputSizeReader.java:65)
at org.apache.pig.tools.pigstats.JobStats.getOutputSize(JobStats.java:543)
at org.apache.pig.tools.pigstats.JobStats.addOneOutputStats(JobStats.java:567)
at org.apache.pig.tools.pigstats.JobStats.addOutputStatistics(JobStats.java:516)
at org.apache.pig.tools.pigstats.PigStatsUtil.addSuccessJobStats(PigStatsUtil.java:360)
at org.apache.pig.tools.pigstats.PigStatsUtil.accumulateStats(PigStatsUtil.java:257)
at org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MapReduceLauncher.launchPig(MapReduceLauncher.java:341)
at org.apache.pig.PigServer.launchPlan(PigServer.java:1322)
at org.apache.pig.PigServer.executeCompiledLogicalPlan(PigServer.java:1307)
at org.apache.pig.PigServer.execute(PigServer.java:1297)
at org.apache.pig.PigServer.executeBatch(PigServer.java:375)
at org.apache.pig.PigServer.executeBatch(PigServer.java:353)
at org.apache.pig.tools.grunt.GruntParser.executeBatch(GruntParser.java:140)
at org.apache.pig.tools.grunt.GruntParser.parseStopOnError(GruntParser.java:202)
at org.apache.pig.tools.grunt.GruntParser.parseStopOnError(GruntParser.java:173)
at org.apache.pig.tools.grunt.Grunt.exec(Grunt.java:84)
at org.apache.pig.Main.run(Main.java:478)
at org.apache.pig.PigRunner.run(PigRunner.java:49)
at org.apache.oozie.action.hadoop.PigMain.runPigJob(PigMain.java:286)
at org.apache.oozie.action.hadoop.PigMain.run(PigMain.java:226)
at org.apache.oozie.action.hadoop.LauncherMain.run(LauncherMain.java:38)
at org.apache.oozie.action.hadoop.PigMain.main(PigMain.java:76)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.apache.oozie.action.hadoop.LauncherMapper.map(LauncherMapper.java:226)
at org.apache.hadoop.mapred.MapRunner.run(MapRunner.java:54)
at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:430)
at org.apache.hadoop.mapred.MapTask.run(MapTask.java:342)
at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:168)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:415)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1548)
at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:163)
It looks like overriding org.apache.pig.StoreFuncInterface#relToAbsPathForStoreLocation solves the problem. Apparently the default implementation resolves the relative STORE location against the user's HDFS working directory, which is where the hdfs://myMachine:8020/user/hdfs/MyObject path in the log comes from; returning the location unchanged stops the stats phase from probing a file that was never written:
@Override
public String relToAbsPathForStoreLocation(String location, Path curDir)
throws IOException {
return location;
}