@Test
  def testEsRDDWriteWithDynamicMapping() {
    val doc1 = Map("one" -> null, "two" -> Set("2"), "three" -> (".", "..", "..."), "number" -> 1)
    val doc2 = Map("OTP" -> "Otopeni", "SFO" -> "San Fran", "number" -> 2)

    val target = wrapIndex("spark-test/scala-dyn-id-write")

    sc.makeRDD(Seq((3, doc1), (4, doc2))).saveToEsWithMeta(target, cfg)

    assertEquals(2, EsSpark.esRDD(sc, target).count())
    assertTrue(RestUtils.exists(target + "/3"))
    assertTrue(RestUtils.exists(target + "/4"))

    assertThat(RestUtils.get(target + "/_search?"), containsString("SFO"))
  }
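
The cfg and wrapIndex used above are not defined in the snippet; they come from the connector's test harness. A minimal stand-in, assuming a local single-node cluster (both definitions here are placeholders, not the harness code):

// hypothetical stand-ins for the test-harness helpers referenced above
val cfg = Map("es.nodes" -> "127.0.0.1", "es.index.auto.create" -> "true")
def wrapIndex(index: String): String = index   // the real helper may prefix the index name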

  @Test
  def testEsRDDWriteWithDynamicMapMapping() {
    val doc1 = Map("one" -> null, "two" -> Set("2"), "three" -> (".", "..", "..."), "number" -> 1)
    val doc2 = Map("OTP" -> "Otopeni", "SFO" -> "San Fran", "number" -> 2)

    val target = wrapIndex("spark-test/scala-dyn-id-write")

    val metadata1 = Map(ID -> 5, TTL -> "1d")
    val metadata2 = Map(ID -> 6, TTL -> "2d", VERSION -> "23")

    assertEquals(5, metadata1.getOrElse(ID, null))
    assertEquals(6, metadata2.getOrElse(ID, null))

    val pairRDD = sc.makeRDD(Seq((metadata1, doc1), (metadata2, doc2)))

    pairRDD.saveToEsWithMeta(target, cfg)

    assertTrue(RestUtils.exists(target + "/5"))
    assertTrue(RestUtils.exists(target + "/6"))

    assertThat(RestUtils.get(target + "/_search?"), containsString("SFO"))
  }
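
The ID, TTL and VERSION keys in the metadata maps are not strings but values of the connector's Metadata enum, so the test above assumes roughly these imports:

import org.elasticsearch.spark._                 // saveToEsWithMeta on pair RDDs
import org.elasticsearch.spark.rdd.EsSpark       // EsSpark.esRDD used in the asserts
import org.elasticsearch.spark.rdd.Metadata._    // ID, TTL, VERSION constants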

 spark-2.0.0-bin-hadoop2.6/bin/spark-shell --jars elasticsearch-hadoop-5.0.1/dist/elasticsearch-spark-20_2.11-5.0.1.jar

Make sure the versions match: the elasticsearch-spark artifact (here elasticsearch-spark-20_2.11-5.0.1) must correspond to both the Spark version (2.0) and the Scala version (2.11) of the spark-shell you launch.
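
If the same code goes into a standalone application rather than the shell, the rule is the same; a minimal build.sbt sketch that keeps the versions from the command above in step (versions are examples, adjust to your installation):

// build.sbt -- Scala 2.11 + Spark 2.0 + elasticsearch-spark-20_2.11 must match
scalaVersion := "2.11.8"

libraryDependencies ++= Seq(
  "org.apache.spark"  %% "spark-core"                  % "2.0.0" % "provided",
  "org.elasticsearch" %  "elasticsearch-spark-20_2.11" % "5.0.1"
)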

 

import org.elasticsearch.spark._

// Inside spark-shell the SparkContext (sc) already exists and sc.getConf only returns
// a copy, so calling set(...) on it has no effect (the master and app name are already
// set by the shell). Pass the es.* options on the command line instead
// (--conf spark.es.nodes=127.0.0.1 --conf spark.es.index.auto.create=true),
// or hand them to each save call as done below:
val esCfg = Map("es.nodes" -> "127.0.0.1", "es.index.auto.create" -> "true")
val numbers = Map("one" -> 1, "two" -> 2, "three" -> 3)
val airports = Map("OTP" -> "Otopeni", "SFO" -> "San Fran")
val r = sc.makeRDD(Seq(numbers, airports))
r.saveToEs("spark/data", esCfg)
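
To check what was written, the same org.elasticsearch.spark._ import also adds esRDD to the SparkContext; a quick read-back sketch, reusing the esCfg map defined above:

// each element is (_id, source as a Scala Map); the two documents saved above should come back
val saved = sc.esRDD("spark/data", esCfg)
saved.collect().foreach(println)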

val doc1 = Map("one" -> null, "two" -> Set("2"), "three" -> (".", "..", "..."), "number" -> 1)
val doc2 = Map("OTP" -> "Otopeni", "SFO" -> "San Fran", "number" -> 2)
val pairRDD = sc.makeRDD(Seq((3, doc1), (4, doc2)))
pairRDD.saveToEsWithMeta("data/test", esCfg)

Querying ES then shows the document with id 3 at data/test/3 and the document with id 4 at data/test/4, i.e. the keys of the pair RDD became the document ids.
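
The same thing can be confirmed from the shell instead of the REST endpoint; a small sketch, again assuming the esCfg map from above, that prints only the _id of each document in data/test:

// esRDD keys carry the document _id, so this should print 3 and 4
sc.esRDD("data/test", esCfg).keys.collect().foreach(println)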
