cassandra 在window上的demo

Cassandra   window使用

1.        下载:http://cassandra.apache.org/download/

2.        解压后,bin目录下,cassandra.bat 直接启动。 Cassandra-cli.bat进入cassandra。

3.        安装 python 2.7.10(python 版本太高可能会不兼容)。安装 python 后即可使用 CQL 操作 cassandra 数据库。

4.        配置环境变量后,以管理员方式运行 cmd  :

powershell Set-ExecutionPolicy Unrestricted    成功如下:

cassandra 在window上的demo

5.   cmd  cqlsh.bat进入cql编辑;(没有安装python 无法使用cqlsh.bat)

CREATE KEYSPACE IF NOT EXISTS pimin_net

WITH REPLICATION = {'class': 'SimpleStrategy','replication_factor':1};

USE pimin_net;

CREATE TABLE IF NOT EXISTS users (

id uuid,

first_name varchar,

last_name varchar,

age int,

emails set<text>,

avatar blob,

PRIMARY KEY (id)

);

SELECT * FROM users;

用户表(users)包含了主键、first_name、last_name、年龄、多个邮箱地址以及头像。关于使用java驱动,建议使用eclipse+maven创建,比如我们创建一个cassandra.test的工程。在pom.xml里面加入

<dependency>
    <groupId>com.datastax.cassandra</groupId>
    <artifactId>cassandra-driver-core</artifactId>
    <version>2.1.0</version>
</dependency>

如果环境不允许使用maven,那么就自己找依赖包吧:cassandra-driver-core-2.1.0.jar、netty-3.9.0.Final.jar、guava-16.0.1.jar、metrics-core-3.0.2.jar和slf4j-api-1.7.5.jar。

解压目录 lib和 apache-cassandra-2.1.16\tools\lib 下,有jar包。

下面是所有java测试代码:实现连接Cassandra集群,新增数据,查询数据。其他的情况类似。

如果需要远程连接,那么cassandra.yaml里面的rpc_address就需要改变为您当前环境实际的IP地址,否则会报错! 本地:client.connect("127.0.0.1");

package cassandra;

import java.io.File;

import java.io.FileInputStream;

import java.io.FileNotFoundException;

import java.io.FileOutputStream;

import java.io.IOException;

import java.nio.ByteBuffer;

import java.nio.channels.FileChannel;

import java.util.HashSet;

import java.util.Set;

import java.util.UUID;

import com.datastax.driver.core.BoundStatement;

import com.datastax.driver.core.Cluster;

import com.datastax.driver.core.Host;

import com.datastax.driver.core.Metadata;

import com.datastax.driver.core.PreparedStatement;

import com.datastax.driver.core.ResultSet;

import com.datastax.driver.core.Row;

import com.datastax.driver.core.Session;

public class TestJava {

private Cluster cluster;

private Session session;

public Cluster getCluster() {

return cluster;

}

public void setCluster(Cluster cluster) {

this.cluster = cluster;

}

public Session getSession() {

return session;

}

public void setSession(Session session) {

this.session = session;

}

/**

* 连接节点

*

* @param node

*/

public void connect(String node) {

cluster = Cluster.builder().addContactPoint(node).build();

Metadata metadata = cluster.getMetadata();

System.out.printf("Connected to cluster: %s\n",      metadata.getClusterName());

for (Host host : metadata.getAllHosts()) {

System.out.printf("Datatacenter: %s; Host: %s; Rack: %s\n",      host.getDatacenter(), host.getAddress(), host.getRack());

}

this.session = cluster.connect();

}

public void insertData() {

PreparedStatement insertStatement = getSession().prepare(

"INSERT INTO pimin_net.users "

+ "(id, first_name, last_name, age, emails,avatar) "

+ "VALUES (?, ?, ?, ?, ?, ?);");

BoundStatement boundStatement = new BoundStatement(insertStatement);

Set<String> emails = new HashSet<String>();

emails.add("xxx@qq.com");

emails.add("xxx@163.com");

java.nio.ByteBuffer avatar = null;

try {

avatar = toByteBuffer("f:\\user.png");

avatar.flip();

System.out.println("头像大小:" + avatar.capacity());

} catch (IOException e) {

// TODO Auto-generated catch block

e.printStackTrace();

}

getSession().execute(boundStatement.bind(UUID.fromString("756716f7-2e54-4715-9f00-91dcbea6cf50"),

"pi", "min", 10, emails, avatar));

}

public void loadData() {

ResultSet resultSet = getSession().execute(

"SELECT first_name,last_name,age,avatar FROM pimin_net.users;");

System.out

.println(String

.format("%-30s\t%-20s\t%-20s\n%s", "first_name","last_name", "age",

"-------------------------------+-----------------------+--------------------"));

for (Row row : resultSet) {

System.out.println(String.format("%-30s\t%-20s\t%-20s",

row.getString("first_name"), row.getString("last_name"),

row.getInt("age")));

ByteBuffer byteBuffer = row.getBytes("avatar");

System.out.println("头像大小:" + (byteBuffer.limit() - byteBuffer.position()));

FileOutputStream fileOutputStream = null;

try {

fileOutputStream = new FileOutputStream("f:\\2.png");

} catch (FileNotFoundException e) {

// TODO Auto-generated catch block

e.printStackTrace();

}

try {

fileOutputStream.write(byteBuffer.array(),

byteBuffer.position(),

byteBuffer.limit() - byteBuffer.position());

fileOutputStream.close();

} catch (IOException e) {

// TODO Auto-generated catch block

e.printStackTrace();

}

}

System.out.println();

}

public void close() {

cluster.close();

}

/**

* 读取文件

*

* @param filename

* @return

* @throws IOException

*/

public static ByteBuffer toByteBuffer(String filename) throws IOException {

File f = new File(filename);

if (!f.exists()) {

throw new FileNotFoundException(filename);

}

FileChannel channel = null;

FileInputStream fs = null;

try {

fs = new FileInputStream(f);

channel = fs.getChannel();

ByteBuffer byteBuffer = ByteBuffer.allocate((int) channel.size());

while ((channel.read(byteBuffer)) > 0) {

// do nothing

// System.out.println("reading");

}

return byteBuffer;

} catch (IOException e) {

e.printStackTrace();

throw e;

} finally {

try {

channel.close();

} catch (IOException e) {

e.printStackTrace();

}

try {

fs.close();

} catch (IOException e) {

e.printStackTrace();

}

}

}

public static void main(String[] args) {

TestJava client = new TestJava();

client.connect("127.0.0.1");

client.insertData();

client.loadData();

client.session.close();

client.close();

}

}

上一篇:Docker run命令详解 转


下一篇:Kernel Functions-Introduction to SVM Kernel & Examples - DataFlair