I wrote a Lambda function as follows.
handler.js
const aws = require('aws-sdk');
const dynamoDb = new aws.DynamoDB.DocumentClient();

const testHandler = async event => {
    // some code
    // ...
    // (userId is presumably taken from `event` in the elided code)
    const user = await getUser(userId);
    // ...
    // some code
};
// Wraps a callback-style function into a Promise.
const promisify = foo => new Promise((resolve, reject) => {
    foo((error, result) => {
        if (error) {
            reject(error);
        } else {
            resolve(result);
        }
    });
});
const getUser = (userId) => promisify(callback =>
    dynamoDb.get({
        TableName: 'test-table',
        Key: {
            "PK": `${userId}`,
            "SK": `${userId}`
        }
    }, callback));

I am new to Apache Spark, and I am trying to deploy a simple piece of Scala code to Spark.
Note: I am trying to connect to an existing, already-running cluster, which I configured through my Java arguments: spark.master=spark://MyHostName:7077
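If that system property does not reach the driver JVM, the master can also be set on the SparkConf directly. A minimal sketch, assuming the standalone master URL from the note above (the object name and the fallback value are illustrative, not part of the original code):

import org.apache.spark.SparkConf

object ConfSketch {
  // new SparkConf() already loads every spark.* JVM system property,
  // so the explicit setMaster below only acts as a fallback for when
  // -Dspark.master=... did not make it to this process.
  def buildConf(): SparkConf = new SparkConf()
    .setAppName("Simple Application")
    .setMaster(sys.props.getOrElse("spark.master", "spark://MyHostName:7077"))
}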
Environment
sbt: 0.13.8
Code
import org.apache.spark.{SparkConf, SparkContext}

object HelloSpark {
  def main(args: Array[String]) {
    // NB: this path is resolved when the job runs, so the file must be
    // readable from the cluster, not just from the machine submitting it.
    val logFile = "/README.md"
    val conf = new SparkConf().setAppName("Simple Application")
    val sc = new SparkContext(conf)
    val logData = sc.textFile(logFile, 2).cache()
    val numAs = logData.filter(line => line.contains("a")).count()
    println("%s done!".format(numAs))
  }
}
build.sbt
name := "data-streamer210"
version := "1.0"
scalaVersion := "2.10.4"
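The build.sbt is truncated above and does not show a Spark dependency, but the project needs one for the import to compile. A minimal sketch of the missing piece, assuming a Spark 1.6.x cluster (1.6 was the last release line published for Scala 2.10; match the version to your cluster):

// Assumption: the spark-core version must match the cluster you connect to.
libraryDependencies += "org.apache.spark" %% "spark-core" % "1.6.3" % "provided"

With "provided", Spark's own classes come from the cluster at run time: build the jar with sbt package and hand it to the cluster with, for example, spark-submit --class HelloSpark --master spark://MyHostName:7077 target/scala-2.10/data-streamer210_2.10-1.0.jar.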