Common raster methods in GeoTrellis

1. Check whether a point falls within an extent; returns true or false

rasterExtent.extent.intersects(x, y)
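
A minimal self-contained sketch, assuming a hypothetical 10x10 raster covering the extent (0, 0)-(10, 10):

import geotrellis.raster.RasterExtent
import geotrellis.vector.Extent

// Hypothetical raster: 10x10 cells covering (0,0)-(10,10)
val rasterExtent = RasterExtent(Extent(0.0, 0.0, 10.0, 10.0), 10, 10)

rasterExtent.extent.intersects(5.0, 5.0)   // true: the point is inside
rasterExtent.extent.intersects(20.0, 5.0)  // false: the point is outside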

2. Find the grid index corresponding to a longitude/latitude

 val index = rasterExtent.mapXToGrid(x) + rasterExtent.mapYToGrid(y) * cols
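
A sketch with hypothetical values: mapXToGrid/mapYToGrid convert map coordinates to a column and row, and col + row * cols flattens them into a single row-major index.

import geotrellis.raster.RasterExtent
import geotrellis.vector.Extent

// Hypothetical raster: 100x100 cells over (0,0)-(100,100), one map unit per cell
val rasterExtent = RasterExtent(Extent(0.0, 0.0, 100.0, 100.0), 100, 100)
val cols = rasterExtent.cols

val x = 10.5
val y = 99.5                           // near the top edge
val col = rasterExtent.mapXToGrid(x)   // 10
val row = rasterExtent.mapYToGrid(y)   // 0 (rows count down from the top)
val index = col + row * cols           // 10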

3. Rasterize a polygon with RasterExtent's foreach method

// rasterize the polygon with rasterExtent.foreach
rasterExtent.foreach(polygon) { (col, row) =>
  targetBand.set(col, row, targetBand.get(col, row) + 1)
}
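
A runnable sketch under assumed inputs (a hypothetical 10x10 raster and a polygon built from an inner extent); each cell the rasterizer visits is incremented, giving a simple coverage count:

import geotrellis.raster._
import geotrellis.raster.rasterize._
import geotrellis.vector.Extent

val rasterExtent = RasterExtent(Extent(0.0, 0.0, 10.0, 10.0), 10, 10)
val targetBand = IntArrayTile.ofDim(10, 10)          // zero-filled
val polygon = Extent(2.0, 2.0, 8.0, 8.0).toPolygon   // hypothetical polygon

rasterExtent.foreach(polygon) { (col, row) =>
  targetBand.set(col, row, targetBand.get(col, row) + 1)
}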

4. Render a tile to a file

tile.renderPng(colorMap).write("D:/test1.png")
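
A sketch assuming a hypothetical constant-valued tile and a two-break color map (colors are packed RGBA ints); renderPng produces a Png whose write method saves it to the given path:

import geotrellis.raster._
import geotrellis.raster.render._

val tile = IntArrayTile.fill(42, 256, 256)  // hypothetical 256x256 tile of 42s
val colorMap = ColorMap(
  Map(
    50  -> 0x00FF00FF,  // values <= 50  -> green
    100 -> 0xFF0000FF   // values <= 100 -> red
  )
)
tile.renderPng(colorMap).write("/tmp/test1.png")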

5. Convert an extent into grid bounds (in effect a 2D array of tile indices)

// Split the tif: break the tile into 28 chunks of 100*100 pixels each.
// TileLayout(layoutCols, layoutRows, tileCols, tileRows):
//   7   - number of columns in the layout
//   4   - number of rows in the layout
//   100 - pixel columns per tile
//   100 - pixel rows per tile
val tl = TileLayout(7, 4, 100, 100)
val ld = LayoutDefinition(extent, tl)      // the extent plus the tile layout define the layer's grid
val gridBounds = ld.mapTransform(ptextent) // the grid bounds covered by ptextent - in effect a 2D array of tile indices
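
A sketch with assumed extents (extent and ptextent are placeholder names from the snippet above; the pre-3.0 package layout matches the imports in section 8). mapTransform maps a query extent to the range of tile columns/rows it touches:

import geotrellis.raster.TileLayout
import geotrellis.spark.tiling.LayoutDefinition
import geotrellis.vector.Extent

val extent = Extent(0.0, 0.0, 70.0, 40.0)   // hypothetical footprint of a 700x400-pixel image
val tl = TileLayout(7, 4, 100, 100)
val ld = LayoutDefinition(extent, tl)

val ptextent = Extent(5.0, 5.0, 25.0, 15.0) // hypothetical query extent
val gridBounds = ld.mapTransform(ptextent)

for {
  row <- gridBounds.rowMin to gridBounds.rowMax
  col <- gridBounds.colMin to gridBounds.colMax
} println(s"covered tile: col=$col, row=$row")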

6. Create a raster array: a 3600*3600 tile

  val targetBand = IntArrayTile.ofDim(3600, 3600)
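ofDim allocates a zero-initialized tile with the given (cols, rows); cells are then read and written with get/set, as in this quick sketch:

import geotrellis.raster.IntArrayTile

val targetBand = IntArrayTile.ofDim(3600, 3600)  // zero-filled, cols x rows
targetBand.set(0, 0, 42)                         // write cell (col=0, row=0)
val v = targetBand.get(0, 0)                     // read it back: 42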

7. Coordinate transformation: convert longitude/latitude to Web Mercator

val point = Point(113.3461, 27.13135)

val xy = point.reproject(LatLng, WebMercator)
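
The same conversion as a self-contained sketch; reproject comes from geotrellis.vector's implicits and here maps EPSG:4326 to EPSG:3857:

import geotrellis.proj4.{LatLng, WebMercator}
import geotrellis.vector._

val point = Point(113.3461, 27.13135)          // lon/lat (EPSG:4326)
val xy = point.reproject(LatLng, WebMercator)  // web-mercator meters (EPSG:3857)
println(s"${xy.x}, ${xy.y}")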

8. ETL exercise

package com.azavea.geotrellis.tutorial

import geotrellis.geotools._
import geotrellis.proj4.WebMercator
import geotrellis.raster._
import geotrellis.spark._
import geotrellis.spark.io._
import geotrellis.spark.io.hadoop._

import org.apache.log4j.Logger
import org.apache.spark.rdd.RDD
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.geotools.gce.geotiff._
import org.opengis.parameter.GeneralParameterValue


object EtlExercise {

  val logger = Logger.getLogger(EtlExercise.getClass)

  /**
    * Dump a layer to disk.
    */
  def dump(rdd: RDD[(SpatialKey, MultibandTile)] with Metadata[TileLayerMetadata[SpatialKey]], stem: String) = {
    val mt = rdd.metadata.mapTransform

    rdd.collect.foreach({ case (k, v) =>
      val extent = mt(k)
      val pr = ProjectedRaster(Raster(v, extent), WebMercator)
      val gc = pr.toGridCoverage2D
      val writer = new GeoTiffWriter(new java.io.File(s"/tmp/tif/${stem}-${System.currentTimeMillis}.tif"))
      writer.write(gc, Array.empty[GeneralParameterValue])
      writer.dispose() // release the GeoTools writer's resources
    })
  }

  /**
    * MAIN
    */
  def main(args: Array[String]) : Unit = {

    /* Command line arguments */
    if (args.length < 3) System.exit(-1)
    val hdfsUri = args(0)
    val layerName = args(1)
    val zoomLevel = args(2).toInt

    /* Spark context */
    val sparkConf = new SparkConf()
      .setAppName("EtlExercise")
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
    val sparkContext = new SparkContext(sparkConf)
    implicit val sc = sparkContext

    logger.info(s"Reading ${layerName}:${zoomLevel} from HDFS")
    val rdd0 = {
      val inLayerId = LayerId(layerName, zoomLevel)
      require(HadoopAttributeStore(hdfsUri).layerExists(inLayerId))
      HadoopLayerReader(hdfsUri).read[SpatialKey, MultibandTile, TileLayerMetadata[SpatialKey]](inLayerId)
    }

    logger.info("Histogram Equalization")
    val rdd1 = rdd0.equalize

    logger.info("Sigmoidal contrast")
    val rdd2 = ContextRDD(rdd0.sigmoidal(.5, 10), rdd0.metadata)

    logger.info("Dumping layers to disk")
    dump(rdd0, "raw")
    dump(rdd1, "equal")
    dump(rdd2, "sigmoidal")
  }

}
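
Usage note: the program expects three positional arguments, parsed at the top of main - the HDFS catalog URI, the layer name, and the zoom level. It reads the layer, derives an equalized variant and a sigmoidal-contrast variant, and dumps all three to /tmp/tif as timestamped GeoTiffs (the /tmp/tif directory is assumed to exist).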

