
When I initialize my Java program, I get the following error stack trace: java.lang.VerifyError: class com.fasterxml.jackson.module.scala.ser.ScalaIteratorSerializer overrides final method withResolved

Exception in thread "main" java.lang.VerifyError: class com.fasterxml.jackson.module.scala.ser.ScalaIteratorSerializer overrides final method withResolved.(Lcom/fasterxml/jackson/databind/BeanProperty;Lcom/fasterxml/jackson/databind/jsontype/TypeSerializer;Lcom/fasterxml/jackson/databind/JsonSerializer;)Lcom/fasterxml/jackson/databind/ser/std/AsArraySerializerBase; 
    at java.lang.ClassLoader.defineClass1(Native Method) 
    at java.lang.ClassLoader.defineClass(ClassLoader.java:763) 
    at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:142) 
    at java.net.URLClassLoader.defineClass(URLClassLoader.java:467) 
    at java.net.URLClassLoader.access$100(URLClassLoader.java:73) 
    at java.net.URLClassLoader$1.run(URLClassLoader.java:368) 
    at java.net.URLClassLoader$1.run(URLClassLoader.java:362) 
    at java.security.AccessController.doPrivileged(Native Method) 
    at java.net.URLClassLoader.findClass(URLClassLoader.java:361) 
    at java.lang.ClassLoader.loadClass(ClassLoader.java:424) 
    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:331) 
    at java.lang.ClassLoader.loadClass(ClassLoader.java:357) 
    at com.fasterxml.jackson.module.scala.ser.IteratorSerializerModule$class.$init$(IteratorSerializerModule.scala:70) 
    at com.fasterxml.jackson.module.scala.DefaultScalaModule.<init>(DefaultScalaModule.scala:19) 
    at com.fasterxml.jackson.module.scala.DefaultScalaModule$.<init>(DefaultScalaModule.scala:35) 
    at com.fasterxml.jackson.module.scala.DefaultScalaModule$.<clinit>(DefaultScalaModule.scala) 
    at org.apache.spark.rdd.RDDOperationScope$.<init>(RDDOperationScope.scala:81) 
    at org.apache.spark.rdd.RDDOperationScope$.<clinit>(RDDOperationScope.scala) 
    at org.apache.spark.SparkContext.withScope(SparkContext.scala:714) 
    at org.apache.spark.SparkContext.hadoopRDD(SparkContext.scala:991) 
    at org.apache.spark.sql.execution.datasources.json.JSONRelation.org$apache$spark$sql$execution$datasources$json$JSONRelation$$createBaseRdd(JSONRelation.scala:101) 
    at org.apache.spark.sql.execution.datasources.json.JSONRelation$$anonfun$4$$anonfun$apply$1.apply(JSONRelation.scala:115) 
    at org.apache.spark.sql.execution.datasources.json.JSONRelation$$anonfun$4$$anonfun$apply$1.apply(JSONRelation.scala:115) 
    at scala.Option.getOrElse(Option.scala:120) 
    at org.apache.spark.sql.execution.datasources.json.JSONRelation$$anonfun$4.apply(JSONRelation.scala:115) 
    at org.apache.spark.sql.execution.datasources.json.JSONRelation$$anonfun$4.apply(JSONRelation.scala:109) 
    at scala.Option.getOrElse(Option.scala:120) 
    at org.apache.spark.sql.execution.datasources.json.JSONRelation.dataSchema$lzycompute(JSONRelation.scala:109) 
    at org.apache.spark.sql.execution.datasources.json.JSONRelation.dataSchema(JSONRelation.scala:108) 
    at org.apache.spark.sql.sources.HadoopFsRelation.schema$lzycompute(interfaces.scala:636) 
    at org.apache.spark.sql.sources.HadoopFsRelation.schema(interfaces.scala:635) 
    at org.apache.spark.sql.execution.datasources.LogicalRelation.<init>(LogicalRelation.scala:37) 
    at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:125) 
    at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:109) 
    at org.apache.spark.sql.DataFrameReader.json(DataFrameReader.scala:244) 
    at org.test.loader.Loader.readJsonFile(Loader.java:73) 
    at org.test.loader.Loader.run(Loader.java:50) 
    at org.test.loader.LoadCatalogue.main(LoadCatalogue.java:22) 

I cannot figure out what is causing this error. Is it related to a conflict between the libraries used in my Maven project? Just in case, here is my pom.xml file:

<?xml version="1.0" encoding="UTF-8"?> 
<project xmlns="http://maven.apache.org/POM/4.0.0" 
     xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" 
     xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> 
    <modelVersion>4.0.0</modelVersion> 

    <groupId>org.test</groupId> 
    <artifactId>test-api</artifactId> 
    <version>1.0-SNAPSHOT</version> 

    <properties> 
     <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> 
     <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding> 
     <java.version>1.8</java.version> 
     <spark.version>1.6.1</spark.version> 
     <es.version>2.2.0</es.version> 
    </properties> 

    <parent> 
     <groupId>org.springframework.boot</groupId> 
     <artifactId>spring-boot-starter-parent</artifactId> 
     <version>1.4.0.BUILD-SNAPSHOT</version> 
    </parent> 

    <dependencies> 
       <dependency> 
     <groupId>org.springframework.boot</groupId> 
     <artifactId>spring-boot-starter-web</artifactId> 
    </dependency> 
    <dependency> 
     <groupId>org.springframework.boot</groupId> 
     <artifactId>spring-boot-starter-test</artifactId> 
     <scope>test</scope> 
    </dependency> 
    <dependency> 
     <groupId>org.springframework.data</groupId> 
     <artifactId>spring-data-elasticsearch</artifactId> 
     <version>2.0.0.RELEASE</version> 
    </dependency> 
    <dependency> 
     <groupId>com.github.spullara.mustache.java</groupId> 
     <artifactId>compiler</artifactId> 
     <version>0.9.1</version> 
    </dependency> 
    <dependency> 
     <groupId>org.elasticsearch</groupId> 
     <artifactId>elasticsearch</artifactId> 
     <version>${es.version}</version> 
    </dependency> 
    <dependency> 
     <groupId>joda-time</groupId> 
     <artifactId>joda-time</artifactId> 
     <version>2.9.3</version> 
    </dependency> 
    <dependency> 
     <groupId>org.apache.jena</groupId> 
     <artifactId>jena-tdb</artifactId> 
     <version>3.0.0</version> 
     <scope>compile</scope> 
    </dependency> 
    <dependency> 
     <groupId>org.json</groupId> 
     <artifactId>json</artifactId> 
     <version>20160810</version> 
     <scope>compile</scope> 
    </dependency> 
    <dependency> 
     <groupId>org.apache.spark</groupId> 
     <artifactId>spark-sql_2.10</artifactId> 
     <version>${spark.version}</version> 
    </dependency> 
    <dependency> 
     <groupId>org.jsoup</groupId> 
     <artifactId>jsoup</artifactId> 
     <version>1.10.2</version> 
    </dependency> 
    </dependencies> 

    <repositories> 
     <repository> 
      <id>spring-releases</id> 
      <url>https://repo.spring.io/libs-release</url> 
     </repository> 
     <repository> 
      <id>spring-snapshots</id> 
      <name>Spring Snapshots</name> 
      <url>https://repo.spring.io/libs-snapshot</url> 
      <snapshots> 
       <enabled>true</enabled> 
      </snapshots> 
     </repository> 
    </repositories> 
    <pluginRepositories> 
     <pluginRepository> 
      <id>spring-releases</id> 
      <url>https://repo.spring.io/libs-release</url> 
     </pluginRepository> 
    </pluginRepositories> 

</project> 
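One way to check whether the libraries above pull in conflicting Jackson versions is Maven's dependency tree, restricted to the Jackson artifacts. This is only a diagnostic sketch; the grep filter is just one convenient way to narrow the output:

mvn dependency:tree -Dverbose | grep -i jackson

With -Dverbose the plugin also lists the versions that were omitted by Maven's conflict resolution, which is usually where a mismatch like this shows up.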

You are using 1.4.0.BUILD-SNAPSHOT as the Spring Boot version, which is certainly not good for dependency stability. Is there a reason for that? 1.4.5, for example, has been released. – Tome


@Tome: I switched to '1.4.5.BUILD-SNAPSHOT', but the error is the same. – Dinosaurius


So there is clearly a dependency conflict. For a hint, see for example: https://github.com/FasterXML/jackson-module-scala/issues/214 – Tome

Answer


Adding the following dependencies solves the problem:

<dependency>
    <groupId>com.fasterxml.jackson.core</groupId>
    <artifactId>jackson-databind</artifactId>
    <version>2.6.7.1</version>
</dependency>

<dependency>
    <groupId>com.fasterxml.jackson.module</groupId>
    <artifactId>jackson-module-scala_2.10</artifactId>
    <version>2.6.7.1</version>
</dependency>