Added comments and removed some unnecessary code

This commit is contained in:
baimei 2013-09-03 08:35:32 +08:00
parent 9c64f3a89c
commit 0f92754860
18 changed files with 1208 additions and 2479 deletions

View File

@ -1,10 +1,11 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" path="src"/>
<classpathentry kind="con" path="org.eclipse.jst.j2ee.internal.web.container"/>
<classpathentry kind="con" path="org.eclipse.jst.j2ee.internal.module.container"/>
<classpathentry kind="con" path="org.eclipse.jdt.junit.JUNIT_CONTAINER/4"/>
<classpathentry kind="con" path="org.eclipse.jst.server.core.container/org.eclipse.jst.server.tomcat.runtimeTarget/_home_work_software_apache-tomcat-7.0.41"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
<classpathentry kind="output" path="bin"/>
</classpath>
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" path="src"/>
<classpathentry kind="con" path="org.eclipse.jst.j2ee.internal.web.container"/>
<classpathentry kind="con" path="org.eclipse.jst.j2ee.internal.module.container"/>
<classpathentry kind="con" path="org.eclipse.jdt.junit.JUNIT_CONTAINER/4"/>
<classpathentry kind="con" path="org.eclipse.jst.server.core.container/org.eclipse.jst.server.tomcat.runtimeTarget/_home_work_software_apache-tomcat-7.0.41"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
<classpathentry kind="con" path="org.eclipse.jst.server.core.container/org.eclipse.jst.server.tomcat.runtimeTarget/Apache Tomcat v7.0"/>
<classpathentry kind="output" path="bin"/>
</classpath>

View File

@ -149,7 +149,7 @@
</fieldset>
<div class="form-actions">
<button class="btn btn-small btn-success icon-plus icon-white" type="submit"><i class="icon-plus icon-white"></i> Add</button>
<button class="btn btn-small btn-success " type="submit"><i class="icon-plus icon-white"></i> Add</button>
<button class="btn">cancel</button>
</div>

View File

@ -1,28 +1,23 @@
/**
* @Project: javaHiveAdimin
* @Title: DatabaseAction.java
* @Package org.hiveadmin.hive.action
* @Description: TODO
* @author wangjie wangjie370124@163.com
* @date Jul 19, 2013 4:31:06 PM
* @version V1.0
*/
* @Project: javaHiveAdimin
* @Title: DatabaseAction.java
* @Package org.hiveadmin.hive.action
* @Description: TODO
* @author wangjie wangjie370124@163.com
* @date Jul 19, 2013 4:31:06 PM
* @version V1.0
*/
package org.hiveadmin.hive.action;
import java.util.List;
import java.util.Map;
import javax.annotation.Resource;
import javax.servlet.ServletContext;
import org.apache.log4j.Logger;
import org.apache.struts2.ServletActionContext;
import org.hiveadmin.hive.beans.HistoryRecord;
import org.hiveadmin.hive.service.HiveDatabaseService;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
import com.opensymphony.xwork2.ActionContext;
import com.opensymphony.xwork2.ActionSupport;
/**
@ -33,11 +28,8 @@ import com.opensymphony.xwork2.ActionSupport;
*/
@Component
@Scope("session")
public class DatabaseAction extends ActionSupport{
/**
*
*/
public class DatabaseAction extends ActionSupport {
private static final long serialVersionUID = 1L;
private String databaseName;
private String description;
@ -45,39 +37,47 @@ public class DatabaseAction extends ActionSupport{
private String type;
private String hdfspath;
private Map<String, String> dbproperties;
private HiveDatabaseService hiveDatabaseService;
private Logger log = Logger.getLogger(DatabaseAction.class);
private List<String> databaseList;
public List<String> getDatabaseList() {
return databaseList;
}
public void setDatabaseList(List<String> databaseList) {
this.databaseList = databaseList;
}
public String getErrorMsg() {
return errorMsg;
}
public void setErrorMsg(String errorMsg) {
this.errorMsg = errorMsg;
}
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
public String getHdfspath() {
return hdfspath;
}
public void setHdfspath(String hdfspath) {
this.hdfspath = hdfspath;
}
public Map<String, String> getDbproperties() {
return dbproperties;
}
public void setDbproperties(Map<String, String> dbproperties) {
this.dbproperties = dbproperties;
}
@ -85,57 +85,70 @@ public class DatabaseAction extends ActionSupport{
public HiveDatabaseService getHiveDatabaseService() {
return hiveDatabaseService;
}
@Resource(name="hiveDatabaseServiceImpl")
@Resource(name = "hiveDatabaseServiceImpl")
public void setHiveDatabaseService(HiveDatabaseService hiveDatabaseService) {
this.hiveDatabaseService = hiveDatabaseService;
}
public String getDatabaseName() {
return databaseName;
}
public void setDatabaseName(String databaseName) {
this.databaseName = databaseName;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public String create(){
public String create() {
try {
HistoryRecord historyRecord = new HistoryRecord();
historyRecord.setOp_user_name((String) ServletActionContext.getContext().getSession().get("user"));
historyRecord.setOp_desc("create database. [database:"+databaseName+"]");
historyRecord.setOp_user_name((String) ServletActionContext
.getContext().getSession().get("user"));
historyRecord.setOp_desc("create database. [database:"
+ databaseName + "]");
hiveDatabaseService.setHistoryRecord(historyRecord);
hiveDatabaseService.createDatebase(databaseName, description, hdfspath, dbproperties);
hiveDatabaseService.createDatebase(databaseName, description,
hdfspath, dbproperties);
} catch (Exception e) {
this.errorMsg = e.getMessage();
log .info("create database failed.");
log.info("create database failed.");
return ERROR;
}
return SUCCESS;
}
public String getHiveDatabaseList(){
public String getHiveDatabaseList() {
try {
HistoryRecord historyRecord = new HistoryRecord();
historyRecord.setOp_user_name((String) ServletActionContext.getContext().getSession().get("user"));
historyRecord.setOp_user_name((String) ServletActionContext
.getContext().getSession().get("user"));
historyRecord.setOp_desc("get databases list.");
hiveDatabaseService.setHistoryRecord(historyRecord);
this.databaseList = hiveDatabaseService.listDatabase();
} catch (Exception e) {
this.errorMsg = e.getMessage();
return ERROR;
}
return SUCCESS;
}
public String deleteDatabase(){
public String deleteDatabase() {
try {
HistoryRecord record = new HistoryRecord();
record.setOp_user_name((String) ServletActionContext.getContext().getSession().get("user"));
record.setOp_desc("detele database. [database:"+databaseName+"]");
record.setOp_user_name((String) ServletActionContext.getContext()
.getSession().get("user"));
record.setOp_desc("detele database. [database:" + databaseName
+ "]");
hiveDatabaseService.setHistoryRecord(record);
hiveDatabaseService.deleteDatebase(databaseName, type);
} catch (Exception e) {

View File

@ -1,207 +0,0 @@
package org.hiveadmin.hive.action;
import java.util.List;
import java.util.Map;
import org.apache.log4j.Logger;
import org.hiveadmin.hive.beans.Columns;
import org.hiveadmin.hive.service.impl.*;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
import com.opensymphony.xwork2.ActionSupport;
/**
 * Struts2 action that manages Hive views: create, drop, and alter a view's
 * TBLPROPERTIES or its backing SELECT statement.
 * <p>
 * Each public action method validates its required request parameters,
 * delegates to {@link HiveViewServiceImpl}, and maps any failure to the
 * {@code ERROR} result while logging the cause.
 */
@Component
@Scope("session")
public class HIveViewAction extends ActionSupport {

    private static final long serialVersionUID = 1379532893454739772L;

    Logger log = Logger.getLogger(HIveViewAction.class);

    // Service that builds and executes the actual HiveQL statements.
    private HiveViewServiceImpl server = new HiveViewServiceImpl();

    // When true, CREATE VIEW / DROP VIEW tolerate an already-existing
    // (resp. missing) view instead of failing.
    private boolean if_not_exists = false;
    // Name of the view; required by every action method.
    private String view_name;
    // Optional explicit column list of the view.
    private List<Columns> columns;
    // Optional view comment.
    private String comment;
    // TBLPROPERTIES applied at creation time.
    private List<Columns> tblproperties_spec;
    // SELECT statement the view is defined over ("sentance" sic, kept
    // because the property name is part of the request interface).
    private String select_sentance;
    // Properties for ALTER VIEW ... SET TBLPROPERTIES.
    private List<Columns> table_properties;

    /**
     * Creates a view from {@code view_name} and {@code select_sentance}.
     *
     * @return {@code SUCCESS} on creation; {@code ERROR} when a required
     *         parameter is missing or the service call throws
     */
    public String createView() {
        boolean invalid = false;
        if (view_name == null || view_name.equals("")) {
            log.error("View name is null!");
            invalid = true;
        }
        if (select_sentance == null || select_sentance.equals("")) {
            log.error("Select_sentance is null!");
            invalid = true;
        }
        if (invalid) {
            return ERROR;
        }
        try {
            server.createView(if_not_exists, view_name, columns, comment,
                    tblproperties_spec, select_sentance);
            log.info("View has been created");
            return SUCCESS;
        } catch (Exception e) {
            e.printStackTrace();
            log.error(e.toString());
            return ERROR;
        }
    }

    /**
     * Drops the view named {@code view_name}.
     *
     * @return {@code SUCCESS} on drop; {@code ERROR} otherwise
     */
    public String dropView() {
        if (view_name == null || view_name.equals("")) {
            log.error("View name is null!");
            return ERROR;
        }
        try {
            server.dropView(if_not_exists, view_name);
            log.info("View has been dropped");
            return SUCCESS;
        } catch (Exception e) {
            e.printStackTrace();
            log.error(e.toString());
            return ERROR;
        }
    }

    /**
     * Applies {@code table_properties} to the view
     * (ALTER VIEW ... SET TBLPROPERTIES).
     *
     * @return {@code SUCCESS} on success; {@code ERROR} when validation or
     *         the service call fails
     */
    public String alterViewProperties() {
        boolean invalid = false;
        if (view_name == null || view_name.equals("")) {
            log.error("View name is null!");
            invalid = true;
        }
        // Bug fix: the original called isEmpty() without a null check and
        // threw a NullPointerException when the parameter was absent.
        if (table_properties == null || table_properties.isEmpty()) {
            log.error("Tblproperties_spec is null!");
            invalid = true;
        }
        if (invalid) {
            return ERROR;
        }
        try {
            server.alterViewProperties(view_name, table_properties);
            log.info("View has been altered");
            return SUCCESS;
        } catch (Exception e) {
            e.printStackTrace();
            log.error(e.toString());
            return ERROR;
        }
    }

    /**
     * Redefines the view's SELECT statement (ALTER VIEW ... AS SELECT).
     *
     * @return {@code SUCCESS} on success; {@code ERROR} otherwise
     */
    public String alterViewAsSelect() {
        boolean invalid = false;
        if (view_name == null || view_name.equals("")) {
            log.error("View name is null!");
            invalid = true;
        }
        if (select_sentance == null || select_sentance.equals("")) {
            log.error("Select_statement is null!");
            invalid = true;
        }
        if (invalid) {
            return ERROR;
        }
        try {
            server.alterViewAsSelect(view_name, select_sentance);
            log.info("View has been altered");
            return SUCCESS;
        } catch (Exception e) {
            e.printStackTrace();
            log.error(e.toString());
            return ERROR;
        }
    }

    public HiveViewServiceImpl getServer() {
        return server;
    }

    public void setServer(HiveViewServiceImpl server) {
        this.server = server;
    }

    public boolean isIf_not_exists() {
        return if_not_exists;
    }

    public void setIf_not_exists(boolean if_not_exists) {
        this.if_not_exists = if_not_exists;
    }

    public String getView_name() {
        return view_name;
    }

    public void setView_name(String view_name) {
        this.view_name = view_name;
    }

    public Logger getLog() {
        return log;
    }

    public void setLog(Logger log) {
        this.log = log;
    }

    public List<Columns> getColumns() {
        return columns;
    }

    public void setColumns(List<Columns> columns) {
        this.columns = columns;
    }

    public String getComment() {
        return comment;
    }

    public void setComment(String comment) {
        this.comment = comment;
    }

    public List<Columns> getTblproperties_spec() {
        return tblproperties_spec;
    }

    public void setTblproperties_spec(List<Columns> tblproperties_spec) {
        this.tblproperties_spec = tblproperties_spec;
    }

    public String getSelect_sentance() {
        return select_sentance;
    }

    public void setSelect_sentance(String select_sentance) {
        this.select_sentance = select_sentance;
    }

    public List<Columns> getTable_properties() {
        return table_properties;
    }

    public void setTable_properties(List<Columns> table_properties) {
        this.table_properties = table_properties;
    }

    public static long getSerialversionuid() {
        return serialVersionUID;
    }
}

View File

@ -1,250 +0,0 @@
package org.hiveadmin.hive.action;
import java.util.List;
import java.util.Map;
import org.apache.log4j.Logger;
import org.hiveadmin.hive.beans.Columns;
import org.hiveadmin.hive.service.impl.HiveIndexServiceImpl;
import org.hiveadmin.hive.service.impl.HiveViewServiceImpl;
import org.springframework.context.annotation.Scope;
import org.springframework.stereotype.Component;
import com.opensymphony.xwork2.ActionSupport;
/**
 * Struts2 action that manages Hive indexes: create and drop.
 * <p>
 * Each action method validates its required request parameters, delegates to
 * {@link HiveIndexServiceImpl}, and maps any failure to the {@code ERROR}
 * result while logging the cause.
 */
@Component
@Scope("session")
public class HiveIndexAction extends ActionSupport {

    // Service that builds and executes the actual HiveQL statements.
    private HiveIndexServiceImpl server = new HiveIndexServiceImpl();
    private Logger log = Logger.getLogger(this.getClass());

    // ---- CREATE INDEX parameters ----
    private String index_name = null;
    private String base_table_name = null;
    private List<String> column_spec;
    private String index_type = null;
    private boolean with_deferred_rebuild = false;
    private List<Columns> properties_spec;
    private String index_table_name = null;
    private String row_format = null;
    private String stored_as = null;
    private String stored_by_with = null;
    private String hdfs_path = null;
    private List<Columns> tblproperties;
    private String index_comment = null;

    // ---- DROP INDEX parameters ----
    private boolean if_exists = false;
    private String drop_index_name;
    private String drop_table_name;

    /**
     * Creates an index on {@code base_table_name} using the supplied
     * columns, index type and storage options.
     *
     * @return {@code SUCCESS} on creation; {@code ERROR} when a required
     *         parameter is missing or the service call throws
     */
    public String createIndex() {
        boolean invalid = false;
        // Bug fix: the original assigned 'false' in these branches, so a
        // failed validation never aborted the action.
        if (index_name == null || index_name.equals("")) {
            log.error("Index name is null");
            invalid = true;
        }
        if (base_table_name == null || base_table_name.equals("")) {
            log.error("Table name is null");
            invalid = true;
        }
        if (index_type == null || index_type.equals("")) {
            // Bug fix: this branch previously logged "Index name is null".
            log.error("Index type is null");
            invalid = true;
        }
        if (invalid) {
            return ERROR;
        }
        try {
            server.createIndex(index_name, base_table_name, column_spec,
                    index_type, with_deferred_rebuild, properties_spec,
                    index_table_name, row_format, stored_as, stored_by_with,
                    hdfs_path, tblproperties, index_comment);
            log.info("Index has been created");
            return SUCCESS;
        } catch (Exception e) {
            e.printStackTrace();
            log.error(e.toString());
            return ERROR;
        }
    }

    /**
     * Drops index {@code drop_index_name} on table {@code drop_table_name}.
     *
     * @return {@code SUCCESS} on drop; {@code ERROR} when a required
     *         parameter is missing or the service call throws
     */
    public String dropIndex() {
        boolean invalid = false;
        // Bug fix: same inverted validation flag as in createIndex().
        if (drop_index_name == null || drop_index_name.equals("")) {
            log.error("Index name is null");
            invalid = true;
        }
        if (drop_table_name == null || drop_table_name.equals("")) {
            log.error("Table name is null");
            invalid = true;
        }
        if (invalid) {
            return ERROR;
        }
        try {
            // Bug fix: the original passed drop_index_name twice; the last
            // argument is the table the index belongs to.
            server.dropIndex(if_exists, drop_index_name, drop_table_name);
            log.info("Index has been dropped");
            return SUCCESS;
        } catch (Exception e) {
            e.printStackTrace();
            log.error(e.toString());
            return ERROR;
        }
    }

    public HiveIndexServiceImpl getServer() {
        return server;
    }

    public void setServer(HiveIndexServiceImpl server) {
        this.server = server;
    }

    public Logger getLog() {
        return log;
    }

    public void setLog(Logger log) {
        this.log = log;
    }

    public String getIndex_name() {
        return index_name;
    }

    public void setIndex_name(String index_name) {
        this.index_name = index_name;
    }

    public String getBase_table_name() {
        return base_table_name;
    }

    public void setBase_table_name(String base_table_name) {
        this.base_table_name = base_table_name;
    }

    public List<String> getColumn_spec() {
        return column_spec;
    }

    public void setColumn_spec(List<String> column_spec) {
        this.column_spec = column_spec;
    }

    public String getIndex_type() {
        return index_type;
    }

    public void setIndex_type(String index_type) {
        this.index_type = index_type;
    }

    public boolean isWith_deferred_rebuild() {
        return with_deferred_rebuild;
    }

    public void setWith_deferred_rebuild(boolean with_deferred_rebuild) {
        this.with_deferred_rebuild = with_deferred_rebuild;
    }

    public List<Columns> getProperties_spec() {
        return properties_spec;
    }

    public void setProperties_spec(List<Columns> properties_spec) {
        this.properties_spec = properties_spec;
    }

    public String getIndex_table_name() {
        return index_table_name;
    }

    public void setIndex_table_name(String index_table_name) {
        this.index_table_name = index_table_name;
    }

    public String getRow_format() {
        return row_format;
    }

    public void setRow_format(String row_format) {
        this.row_format = row_format;
    }

    public String getStored_as() {
        return stored_as;
    }

    public void setStored_as(String stored_as) {
        this.stored_as = stored_as;
    }

    public String getStored_by_with() {
        return stored_by_with;
    }

    public void setStored_by_with(String stored_by_with) {
        this.stored_by_with = stored_by_with;
    }

    public String getHdfs_path() {
        return hdfs_path;
    }

    public void setHdfs_path(String hdfs_path) {
        this.hdfs_path = hdfs_path;
    }

    public List<Columns> getTblproperties() {
        return tblproperties;
    }

    public void setTblproperties(List<Columns> tblproperties) {
        this.tblproperties = tblproperties;
    }

    public String getIndex_comment() {
        return index_comment;
    }

    public void setIndex_comment(String index_comment) {
        this.index_comment = index_comment;
    }

    public boolean isIf_exists() {
        return if_exists;
    }

    public void setIf_exists(boolean if_exists) {
        this.if_exists = if_exists;
    }

    public String getDrop_index_name() {
        return drop_index_name;
    }

    public void setDrop_index_name(String drop_index_name) {
        this.drop_index_name = drop_index_name;
    }

    public String getDrop_table_name() {
        return drop_table_name;
    }

    public void setDrop_table_name(String drop_table_name) {
        this.drop_table_name = drop_table_name;
    }
}

File diff suppressed because it is too large Load Diff

View File

@ -1,45 +1,95 @@
/**
* @Project: javaHiveAdimin
* @Title: Columns.java
* @Package org.hiveadmin.hive.beans
* @author xuliang,xuliang12@octaix.iscas.ac.cn
* @Copyright: 2013 www.1000chi.comInc. All rights reserved.
* @version V1.0
*/
package org.hiveadmin.hive.beans;
/**
* @ClassName Columns
* @Description 数据库列类型
*/
/**
 * Describes a single column of a Hive table: its name, its data type and an
 * optional free-text comment. The comment is never {@code null}; it defaults
 * to the empty string.
 */
public class Columns {

    // Column identifier as it appears in the table definition.
    private String cols_name;

    // Hive data type of the column (e.g. STRING, INT).
    private String cols_type;

    // Human-readable comment; empty by default rather than null.
    private String cols_comment = "";

    /** Creates a column with no name or type and an empty comment. */
    public Columns() {
    }

    /**
     * Returns the column name.
     *
     * @return the name, or {@code null} if not set yet
     */
    public String getCols_name() {
        return cols_name;
    }

    /**
     * Sets the column name.
     *
     * @param cols_name the new name
     */
    public void setCols_name(String cols_name) {
        this.cols_name = cols_name;
    }

    /**
     * Returns the column's data type.
     *
     * @return the type, or {@code null} if not set yet
     */
    public String getCols_type() {
        return cols_type;
    }

    /**
     * Sets the column's data type.
     *
     * @param cols_type the new type
     */
    public void setCols_type(String cols_type) {
        this.cols_type = cols_type;
    }

    /**
     * Returns the column's comment.
     *
     * @return the comment; the empty string when none was set
     */
    public String getCols_comment() {
        return cols_comment;
    }

    /**
     * Sets the column's comment.
     *
     * @param cols_comment the new comment
     */
    public void setCols_comment(String cols_comment) {
        this.cols_comment = cols_comment;
    }
}

View File

@ -12,22 +12,49 @@ package org.hiveadmin.hive.beans;
import java.sql.Timestamp;
/**
* @ClassName HistoryRecord
* This class is to describe the history record of operation.
* @Description TODO
* @author wangjie wangjie370124@163.com
* @date Jul 19, 2013 8:41:27 PM
*/
public class HistoryRecord {
/**
* op_record_id:this property is the id of this operation
*/
private int op_record_id;
/**
* op_user_name:this property is the name of user who operate
*/
private String op_user_name;
/**
* op_desc:this property is the description about this operation
*/
private String op_desc;
/**
* op_sql:this property is the SQL sentence
*/
private String op_sql;
/**
* op_res:this property is the result status
*/
private boolean op_res;
/**
* op_ts:this property is the time of this operation occur
*/
private Timestamp op_ts;
/**
* status:this property is status of operation
*/
private String status;
/**
* resultfile:this property is the file which contains the result
*/
private String resultfile;
/**
* This method is to create an object of HistoryRecord
*/
public HistoryRecord(){
this.op_user_name="";
this.op_desc="";

View File

@ -1,92 +0,0 @@
/**
* @Project: javaHiveAdimin
* @Title: HiveTableBean.java
* @Package org.hiveadmin.hive.beans
* @Description: TODO
* @author wangjie wangjie370124@163.com
* @date Aug 18, 2013 3:54:07 PM
* @version V1.0
*/
package org.hiveadmin.hive.beans;
import java.util.ArrayList;
import java.util.List;
/**
 * Form-backing bean describing a Hive table to be created: target database,
 * table kind, name, comment, PARTITIONED BY clause and the list of column
 * definitions. Also carries the fixed list of Hive primitive type names
 * offered to the UI (read-only; no setter is exposed).
 *
 * @author wangjie wangjie370124@163.com
 */
public class HiveTableBean {

    private String database;
    private boolean tableType;
    private String tablename;
    private boolean createifnotexist;
    private String tableComment;
    private String partitionedby;
    private List<HiveTableFiledBean> fieldList;
    private List<String> primaryTypeList;

    /** Populates {@code primaryTypeList} with Hive's primitive type names. */
    public HiveTableBean() {
        primaryTypeList = new ArrayList<>();
        String[] hivePrimitives = {
                "TINYINT", "SMALLINT", "INT", "BIGINT", "BOOLEAN", "FLOAT",
                "DOUBLE", "STRING", "BINARY", "TIMESTAMP", "DECIMAL" };
        for (String typeName : hivePrimitives) {
            primaryTypeList.add(typeName);
        }
    }

    public boolean isTableType() {
        return tableType;
    }

    public void setTableType(boolean tableType) {
        this.tableType = tableType;
    }

    public String getDatabase() {
        return database;
    }

    public void setDatabase(String database) {
        this.database = database;
    }

    public String getTablename() {
        return tablename;
    }

    public void setTablename(String tablename) {
        this.tablename = tablename;
    }

    public boolean isCreateifnotexist() {
        return createifnotexist;
    }

    public void setCreateifnotexist(boolean createifnotexist) {
        this.createifnotexist = createifnotexist;
    }

    public String getTableComment() {
        return tableComment;
    }

    public void setTableComment(String tableComment) {
        this.tableComment = tableComment;
    }

    public String getPartitionedby() {
        return partitionedby;
    }

    public void setPartitionedby(String partitionedby) {
        this.partitionedby = partitionedby;
    }

    public List<HiveTableFiledBean> getFieldList() {
        return fieldList;
    }

    public void setFieldList(List<HiveTableFiledBean> fieldList) {
        this.fieldList = fieldList;
    }

    public List<String> getPrimaryTypeList() {
        return primaryTypeList;
    }
}

View File

@ -1,40 +0,0 @@
/**
* @Project: javaHiveAdimin
* @Title: HiveTableFiledBean.java
* @Package org.hiveadmin.hive.beans
* @Description: TODO
* @author wangjie wangjie370124@163.com
* @date Aug 18, 2013 4:05:13 PM
* @version V1.0
*/
package org.hiveadmin.hive.beans;
/**
 * One column of a Hive table as entered in the create-table form: name, data
 * type and comment. ("Filed" is a historical typo for "field" preserved in
 * the class name because callers reference it.)
 *
 * @author wangjie wangjie370124@163.com
 */
public class HiveTableFiledBean {

    // Column identifier.
    private String col_name;
    // Hive data type of the column.
    private String col_type;
    // Free-text comment; may be null when not provided.
    private String col_comment;

    public String getCol_name() {
        return col_name;
    }

    public void setCol_name(String col_name) {
        this.col_name = col_name;
    }

    public String getCol_type() {
        return col_type;
    }

    public void setCol_type(String col_type) {
        this.col_type = col_type;
    }

    public String getCol_comment() {
        return col_comment;
    }

    public void setCol_comment(String col_comment) {
        this.col_comment = col_comment;
    }
}

View File

@ -1,25 +0,0 @@
package org.hiveadmin.hive.beans;
import java.util.List;
import java.util.Map;
/**
 * Describes one Hive table partition: the PARTITION (col = value, ...)
 * specification and an optional storage LOCATION.
 */
public class Partition {

    // Partition column name/value pairs.
    private List<Columns> partition_spec;

    // Optional HDFS location of the partition's data; may be null.
    private String location;

    public List<Columns> getPartition_spec() {
        return partition_spec;
    }

    public void setPartition_spec(List<Columns> partition_spec) {
        this.partition_spec = partition_spec;
    }

    public String getLocation() {
        return location;
    }

    public void setLocation(String location) {
        this.location = location;
    }
}

View File

@ -1,26 +1,37 @@
/**
* @Project: javaHiveAdimin
* @Title: User.java
* @Package org.hiveadmin.hive.beans
* @Description: TODO
* @author wangjie wangjie370124@163.com
* @date Jul 19, 2013 9:09:26 PM
* @version V1.0
*/
* @Project: javaHiveAdimin
* @Title: User.java
* @Package org.hiveadmin.hive.beans
* @author wangjie,wangjie370124@163.com
* @Copyright: 2013 www.1000chi.comInc. All rights reserved.
* @version V1.0
*/
package org.hiveadmin.hive.beans;
/**
* @ClassName User
* @Description TODO
* @author wangjie wangjie370124@163.com
*This class is to describe user information.
* @author Wangjie wangjie370124@163.com
* @date Jul 19, 2013 9:09:26 PM
*/
public class User {
/**
* userid:this property is the id of user
*/
private int userid;
/**
* username:this property is the name of user
*/
private String username;
/**
* usergroup:this property is the group which this user belongs to
*/
private String usergroup;
/**
* userpass:this property is the user's password
*/
private String userpass;
public int getUserid() {
return userid;
}

View File

@ -1,28 +1,66 @@
package org.hiveadmin.hive.service;
import java.sql.ResultSet;
import java.util.List;
import java.util.Map;
import org.hiveadmin.hive.beans.HistoryRecord;
import org.hiveadmin.hive.dao.impl.UserHistoryLog;
/**
 * Service contract for managing Hive databases (schemas): listing, creating
 * and dropping them, plus attaching an audit record describing the operation
 * being performed.
 */
public interface HiveDatabaseService {
/** Returns log text for the service. NOTE(review): exact content is not visible here — confirm in the implementation. */
public String getLog();
/** Attaches the audit record (user, description, ...) for the next operation. */
public void setHistoryRecord(HistoryRecord historyRecord);
/** Lists the names of all databases. */
public List<String> listDatabase() throws Exception;
/** Creates a database with an optional comment, HDFS location and DBPROPERTIES. ("Datebase" is a typo preserved for caller compatibility.) */
public void createDatebase(String name,String comment,String hdfspath,Map<String, String> dbproperties) throws Exception;
/** Drops the named database; {@code type} selects the drop behaviour — presumably RESTRICT/CASCADE, confirm in the implementation. */
public void deleteDatebase(String name,String type) throws Exception;
}
/**
* @Project: javaHiveAdimin
* @Title: HiveDatabaseService.java
* @Package org.hiveadmin.hive.service.impl
* @author xuliang,xuliang12@octaix.iscas.ac.cn
* @Copyright: 2013 www.1000chi.comInc. All rights reserved.
* @version V1.0
*/
package org.hiveadmin.hive.service;
import java.util.List;
import java.util.Map;
import javax.annotation.Resource;
import org.hiveadmin.hive.beans.HistoryRecord;
import org.hiveadmin.hive.dao.impl.UserHistoryLog;
/**
 * Service contract for managing Hive databases (schemas): listing, creating
 * and dropping them, with support for recording an audit history entry per
 * operation.
 *
 * @author xuliang,xuliang12@octaix.iscas.ac.cn
 */
public interface HiveDatabaseService {
/**
 * Attaches the audit record describing the operation about to run.
 *
 * @param historyRecord an object of HistoryRecord (user, description, ...)
 */
public abstract void setHistoryRecord(HistoryRecord historyRecord);
/**
 * Injects the DAO used to persist the operation history.
 *
 * @param userHistoryLog the history-log DAO
 */
public abstract void setUserHistoryLog(UserHistoryLog userHistoryLog);
/**
 * Creates a database. ("Datebase" is a typo preserved for caller
 * compatibility.)
 *
 * @param name database name
 * @param comment optional description; may be null
 * @param hdfspath optional HDFS location; may be null
 * @param dbproperties optional DBPROPERTIES key/value pairs
 * @throws Exception if the underlying HiveQL execution fails
 */
public abstract void createDatebase(String name, String comment,
String hdfspath, Map<String, String> dbproperties) throws Exception;
/**
 * Drops the named database.
 *
 * @param name database name
 * @param type drop modifier — presumably RESTRICT/CASCADE, confirm in the
 *             implementation
 * @throws Exception if the underlying HiveQL execution fails
 */
public abstract void deleteDatebase(String name, String type)
throws Exception;
/**
 * Lists the names of all databases.
 *
 * @return the database names
 * @throws Exception if the underlying HiveQL execution fails
 */
public abstract List<String> listDatabase() throws Exception;
}

View File

@ -1,9 +1,296 @@
/**
* @Project: javaHiveAdimin
* @Title: HiveTableService.java
* @Package org.hiveadmin.hive.service
* @author xuliang,xuliang12@octaix.iscas.ac.cn
* @Copyright: 2013 www.1000chi.comInc. All rights reserved.
* @version V1.0
*/
package org.hiveadmin.hive.service;
import java.sql.ResultSet;
import java.util.HashMap;
import java.util.List;
import org.hiveadmin.hive.beans.Columns;
/**
 * HiveTableService defines the operations for managing Hive tables; as
 * visible here, that is table creation.
 *
 * @author xuliang,xuliang12@octaix.iscas.ac.cn
 */
public interface HiveTableService {
/**
 * createTables is the method to achieve the creation of hive table.
 *
 * @param external
 * Lets you create a table and provide a LOCATION so that Hive
 * does not use a default location for this table
 * @param if_not_exists
 * To skip the error which is thrown if a table or view with the
 * same name already exists
 * @param database_name
 * The name of database
 * @param table_name
 * The name of table
 * @param columns
 * The columns of table
 * @param table_comment
 * The comment of table
 * @param P_columns
 * The partition columns of the table
 * @param cluser_cols_name
 * The name of columns that is used to cluster
 * @param clu_sorted_cols
 * The name of columns that is used to sort
 * @param num_buckets
 * The number of buckets
 * @param skewed_col_names
 * The name of columns that is used to skew
 * @param skewed_col_values
 * The value of columns that is used to skew
 * @param row_format_value
 * The format of row
 * @param file_format
 * The format of file
 * @param row_sorted_by_cols
 * The columns that is used to sort
 * @param hdfs_path
 * The path of data file
 * @param select_statement
 * The statement of selection
 * @param exist_table_or_view_name
 * The name of an exist table or view
 * @throws Exception if the underlying HiveQL execution fails
 */
public abstract void createTables(boolean external, boolean if_not_exists,
String database_name, String table_name, List<Columns> columns,
String table_comment, List<Columns> P_columns,
List<String> cluser_cols_name, List<Columns> clu_sorted_cols,
String num_buckets, List<String> skewed_col_names,
List<String> skewed_col_values, String row_format_value,
String file_format, List<Columns> row_sorted_by_cols,
String hdfs_path, String select_statement,
String exist_table_or_view_name) throws Exception;
}
/**
* dropTable is the method to removes metadata and data for this table.
*
* @param table_name
* The name of table
* @param database_name
* The name of database
* @throws Exception
*/
public abstract void dropTable(String table_name, String database_name)
throws Exception;
/**
* truncateTable is the method to removes all rows from a table or
* partition(s)
*
* @param table_name
* The name of table
* @param partition_spec
* The name and value of partition
* @param database_name
* The name of database
* @throws Exception
*/
public abstract void truncateTable(String table_name,
List<Columns> partition_spec, String database_name)
throws Exception;
/**
* renameTable is the method to change the name of a table to a different
* name.
*
* @param old_name
* The original name of table
* @param new_name
* The new name of table
* @param database_name
* The name of database which the table belongs to
* @throws Exception
*/
public abstract void renameTable(String old_name, String new_name,
String database_name) throws Exception;
/**
* changeColumn is the method to change a column's name, data type, comment,
* or position.
*
* @param table_name
* The name of table
* @param has_column
* The flag of "COLUMN" property
* @param col_old_name
* The original name of column
* @param col_new_name
* The new name of column
* @param col_type
* The type of column
* @param col_comment
* The comment of column
* @param first_or_after
* The flag of relative position
* @param after_col_name
* The name of relative column
* @param database_name
* The name of database
* @throws Exception
*/
public abstract void changeColumn(String table_name, boolean has_column,
String col_old_name, String col_new_name, String col_type,
String col_comment, int first_or_after, String after_col_name,
String database_name) throws Exception;
/**
* addorReplacecolumn is the method to add new columns to the end of the
* existing columns but before the partition columns or to removes all
* existing columns and adds the new set of columns.
*
* @param replace_or_add
* The flag of add a column or replace columns
* @param table_name
* The name of table
* @param columns
* The columns that will be added or replace
* @param database_name
* The name of database
* @throws Exception
*/
public abstract void addorReplacecolumn(boolean replace_or_add,
String table_name, List<Columns> columns, String database_name)
throws Exception;
/**
* describeTable is the method to shows the list of columns including
* partition columns for the given table.
*
* @param table_name
* The name of table
* @param database_name
* The name of database
* @throws Exception
* @return List<Columns> The information about tale's columns
*/
public abstract List<Columns> describeTable(String table_name,
String database_name) throws Exception;
/**
* showTable is the method to list all the base tables and views in the
* current database
*
* @param database_name
* The name of database
* @throws Exception
* @return List<String> The list of the table
*/
public abstract List<String> showTable(String database_name)
throws Exception;
/**
 * showColumns is the method to show all the columns in a table, including
 * partition columns.
 *
 * @param tb_in_or_from
 *            The flag choosing the "IN" or "FROM" keyword for the table
 *            clause — TODO confirm which value maps to which keyword
 * @param table_name
 *            The name of the table
 * @param database_name
 *            The name of the database the table belongs to
 * @param db_in_or_from
 *            The flag choosing the "IN" or "FROM" keyword for the database
 *            clause
 * @throws Exception
 *             if the underlying SHOW COLUMNS statement fails
 * @return List<String> The names of the columns in the table
 */
public abstract List<String> showColumns(boolean tb_in_or_from,
String table_name, String database_name, boolean db_in_or_from)
throws Exception;
/**
 * selectFromTable is the method to select data from a table.
 *
 * @param null_all_dist
 *            The flag selecting "NULL", "ALL" or "DISTINCT" — TODO confirm
 *            which integer value maps to which keyword
 * @param select_exprs
 *            The names of the columns to select from the table
 * @param table_name
 *            The name of the table
 * @param where_condition
 *            The boolean expressions of the WHERE clause
 * @param group_col_list
 *            The names of the columns used in the GROUP BY clause
 * @param cluster_col_list
 *            The names of the columns used in the CLUSTER BY clause
 * @param dis_col_list
 *            The names of the columns used in the DISTRIBUTE BY clause
 * @param sort_col_list
 *            The columns used in the SORT BY clause
 * @param limit_num
 *            The maximum number of result rows to return (LIMIT)
 * @param database_name
 *            The name of the database the table belongs to
 * @throws Exception
 *             if the underlying SELECT statement fails
 * @return List<List<String>> the selected rows, one inner list per row
 */
public abstract List<List<String>> selectFromTable(int null_all_dist,
List<String> select_exprs, String table_name,
List<String> where_condition, List<String> group_col_list,
List<String> cluster_col_list, List<String> dis_col_list,
List<Columns> sort_col_list, int limit_num, String database_name)
throws Exception;
/**
 * loadFileIntoTable is the method to load files into tables.<br>
 * Hive does not do any transformation while loading data into tables. Load
 * operations are currently pure copy/move operations that move data files
 * into locations corresponding to Hive tables.
 * <p>
 *
 * @param inpath
 *            The path of the file to be loaded
 * @param overwrite
 *            The flag of the "OVERWRITE" keyword
 * @param table_name
 *            The name of the table
 * @param local
 *            The flag of the "LOCAL" keyword (original comment said
 *            "LOCATION" — presumably LOCAL INPATH; verify against the
 *            implementation)
 * @param partition_spec
 *            The names and values of the target partitions
 * @param database_name
 *            The name of the database the table belongs to
 * @throws Exception
 *             if the underlying LOAD DATA statement fails
 */
public abstract void loadFileIntoTable(String inpath, boolean overwrite,
String table_name, boolean local, List<Columns> partition_spec,
String database_name) throws Exception;
/**
 * cloneTo is the method to get the list of databases.
 *
 * @throws Exception
 *             if the database list cannot be retrieved
 * @return HashMap<String,String> the names and values of the databases;
 *         this is consumed by the select tag in Struts2
 */
public abstract HashMap<String, String> cloneTo() throws Exception;
/**
 * cloneTable is the method to copy an existing table to an existing
 * database, giving the copy a new name.
 *
 * @param database_name
 *            The name of the database the original table belongs to
 * @param table_name
 *            The name of the original table
 * @param to_database_name
 *            The name of the database the table is copied to
 * @param table_new_name
 *            The new name of the copied table
 * @throws Exception
 *             if the copy fails
 */
public abstract void cloneTable(String database_name, String table_name,
String to_database_name, String table_new_name) throws Exception;
}

View File

@ -1,194 +1,250 @@
/**
* @Project: javaHiveAdimin
* @Title: HiveDatabaseServiceImpl.java
* @Package org.hiveadmin.hive.service.impl
* @author Wangjie,wangjie12@octaix.iscas.ac.cn
* @Copyright: 2013 www.1000chi.com Inc. All rights reserved.
* @version V1.0
*/
package org.hiveadmin.hive.service.impl;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import javax.annotation.Resource;
import org.apache.hadoop.hive.jdbc.HiveConnection;
import org.apache.hadoop.hive.ql.parse.HiveParser.metastoreCheck_return;
import org.apache.hadoop.hive.ql.parse.HiveParser.restrictOrCascade_return;
import org.apache.hadoop.hive.ql.parse.HiveParser_IdentifiersParser.booleanValue_return;
import org.apache.log4j.Logger;
import org.hiveadmin.hive.beans.HistoryRecord;
import org.hiveadmin.hive.dao.impl.UserHistoryLog;
import org.hiveadmin.hive.hiveutils.HiveConnectionBean;
import org.hiveadmin.hive.service.HiveDatabaseService;
import org.springframework.dao.DataAccessResourceFailureException;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.stereotype.Component;
import org.springframework.stereotype.Service;
/**
* @ClassName HiveDatabaseServiceImpl
* @Description Implementation of the Hive database operations (hive数据库操作实现).
* HiveDatabaseServiceImpl describes the operations on the Hive database
* and implements the basic functions for operating on the database.
*
* @author Wangjie,wangjie12@octaix.iscas.ac.cn
*/
@Component(value="hiveDatabaseServiceImpl")
@Component(value = "hiveDatabaseServiceImpl")
public class HiveDatabaseServiceImpl implements HiveDatabaseService {
/**
* hiveConnection:this property is used to connection hive
*/
private HiveConnectionBean hiveConnection;
/**
* userHistoryLog:this property is to push the records about user into the
* database
*/
private UserHistoryLog userHistoryLog;
/**
* historyRecord:this property is to describe the records about operation on
* the database
*/
private HistoryRecord historyRecord;
public HiveDatabaseServiceImpl(){
/**
* This method can create a new HiveDatabaseServiceImpl object.
*/
public HiveDatabaseServiceImpl() {
}
/**
* getHistoryRecord is the method to return the history record
*
* @return HistoryRecord
*/
public HistoryRecord getHistoryRecord() {
return historyRecord;
}
/**
* setHistoryRecord
*
* @param historyRecord
* @see org.hiveadmin.hive.service.impl.HiveDatabaseService#setHistoryRecord(org.hiveadmin.hive.beans.HistoryRecord)
*/
@Override
public void setHistoryRecord(HistoryRecord historyRecord) {
this.historyRecord = historyRecord;
}
public UserHistoryLog getUserHistoryLog() {
return userHistoryLog;
}
/**
* setUserHistoryLog
*
* @param userHistoryLog
* @see org.hiveadmin.hive.service.impl.HiveDatabaseService#setUserHistoryLog(org.hiveadmin.hive.dao.impl.UserHistoryLog)
*/
@Override
@Resource
public void setUserHistoryLog(UserHistoryLog userHistoryLog) {
this.userHistoryLog = userHistoryLog;
}
public HiveConnectionBean getHiveConnection() {
return hiveConnection;
}
@Resource
public void setHiveConnection(HiveConnectionBean hiveConnection) {
this.hiveConnection = hiveConnection;
}
private Logger log = Logger.getLogger(this.getClass());
private String curlog;
public String getLog(){
public String getLog() {
return this.curlog;
}
/**
* @param name: 要创建的数据库名称
* @param comment: 创建数据库的comment字段
* @param hdfspath:创建数据库的location字段,指在hdfs上的位置
* @param dbproperties:数据库的properties字段,为一个map
* @throws Exception
* @throw RuntimeException:创建失败
* (non-Javadoc)
* @see org.hiveadmin.hive.service.HiveDatabaseService#createDatebase(java.lang.String, java.lang.String, java.lang.String, java.util.Map)
* createDatebase
*
* @param name
* @param comment
* @param hdfspath
* @param dbproperties
* @throws Exception
* @see org.hiveadmin.hive.service.impl.HiveDatabaseService#createDatebase(java.lang.String,
* java.lang.String, java.lang.String, java.util.Map)
*/
@Override
public void createDatebase(String name, String comment, String hdfspath,
Map<String, String> dbproperties) throws Exception {
if (name==null || name.trim().length()<1){
if (name == null || name.trim().length() < 1) {
log.error("create database error. database name param is null");
throw new Exception("create database error. database name param is null");
throw new Exception(
"create database error. database name param is null");
}
String sql = "CREATE DATABASE IF NOT EXISTS "+name;
if (comment !=null && comment.trim().length()>0){
sql+=" COMMENT '"+comment+" ' ";
String sql = "CREATE DATABASE IF NOT EXISTS " + name;
if (comment != null && comment.trim().length() > 0) {
sql += " COMMENT '" + comment + " ' ";
}
if (hdfspath !=null && hdfspath.trim().length()>0){
sql+=" LOCATION '"+hdfspath+" ' ";
if (hdfspath != null && hdfspath.trim().length() > 0) {
sql += " LOCATION '" + hdfspath + " ' ";
}
if (dbproperties!=null && dbproperties.size()>0){
log.info("properties is not null. size:"+dbproperties.size());
if (dbproperties != null && dbproperties.size() > 0) {
log.info("properties is not null. size:" + dbproperties.size());
Iterator<String> it = dbproperties.keySet().iterator();
sql+=" WITH DBPROPERTIES ( ";
String inner="";
while(it.hasNext()){
sql += " WITH DBPROPERTIES ( ";
String inner = "";
while (it.hasNext()) {
String key = it.next();
String value = dbproperties.get(key);
inner+="'"+key+"'='"+value+"'";
if(it.hasNext())
inner+=",";
inner += "'" + key + "'='" + value + "'";
if (it.hasNext())
inner += ",";
}
sql+=inner+")";
sql += inner + ")";
}
historyRecord.setOp_sql(sql);
Connection conn=null;
try{
conn= hiveConnection.getConnection();
if (conn==null){
Connection conn = null;
try {
conn = hiveConnection.getConnection();
if (conn == null) {
log.error("get hive connection is null. can't do create database.");
throw new Exception("get hive connection is null. can't do create database.");
throw new Exception(
"get hive connection is null. can't do create database.");
}
Statement stmt = conn.createStatement();
boolean res = stmt.execute(sql);
if(res != true){
if (res != true) {
log.error("create database res:false");
throw new Exception("failed to create db. sql:"+sql);
throw new Exception("failed to create db. sql:" + sql);
}
log.info("success to create database. sql cmd:"+sql);
log.info("success to create database. sql cmd:" + sql);
historyRecord.setOp_res(true);
userHistoryLog.addHistotyRecord(historyRecord);
conn.close();
}catch(Exception e){
log.error("failed to create database. sql cmd:"+sql +"[exception:"+e.getMessage()+"]");
} catch (Exception e) {
log.error("failed to create database. sql cmd:" + sql
+ "[exception:" + e.getMessage() + "]");
historyRecord.setOp_res(false);
throw new Exception("failed to create database. [sql cmd:"+sql);
throw new Exception("failed to create database. [sql cmd:" + sql);
}
}
/**
* @param name:要删除的数据库名称
* @param type:删除的类型,类型为[RESTRICT|CASCADE]
* @throws Exception
* deleteDatebase
*
* @param name
* @param type
* @throws Exception
* @see org.hiveadmin.hive.service.impl.HiveDatabaseService#deleteDatebase(java.lang.String,
* java.lang.String)
*/
@Override
public void deleteDatebase(String name, String type) throws Exception {
if(name==null || name.trim().length()<1){
if (name == null || name.trim().length() < 1) {
log.error("delete database error. database param is null");
throw new Exception("delete database error. database param is null");
}
String sql ="DROP DATABASE IF EXISTS "+name+" ";
if(type !=null && type.trim().length()>0 ){
sql+= type;
String sql = "DROP DATABASE IF EXISTS " + name + " ";
if (type != null && type.trim().length() > 0) {
sql += type;
}
historyRecord.setOp_sql(sql);
try{
Connection conn =null;
conn = hiveConnection.getConnection();
try {
Connection conn = null;
conn = HiveConnectionBean.getConnection();
Statement stmt = conn.createStatement();
boolean res = stmt.execute(sql);
if(res !=true){
if (res != true) {
log.info("delete database res:false");
throw new Exception();
}
else{
log.info("success to delete database. sql:"+sql);
} else {
log.info("success to delete database. sql:" + sql);
historyRecord.setOp_res(true);
userHistoryLog.addHistotyRecord(historyRecord);
conn.close();
}
}catch(Exception e){
log.info("failed to delete database. [sql:"+sql);
} catch (Exception e) {
log.info("failed to delete database. [sql:" + sql);
historyRecord.setOp_res(false);
userHistoryLog.addHistotyRecord(historyRecord);
throw e;
}
}
/* (non-Javadoc)
* @see org.hiveadmin.hive.service.HiveDatabaseService#listDatabase()
/**
* listDatabase
*
* @return
* @throws Exception
* @see org.hiveadmin.hive.service.impl.HiveDatabaseService#listDatabase()
*/
@Override
public List<String> listDatabase() throws Exception {
String sql = "show databases";
Connection conn = null;
conn= hiveConnection.getConnection();
if(conn==null){
conn = hiveConnection.getConnection();
if (conn == null) {
log.error("get hive connection error!");
return null;
}
historyRecord.setOp_sql(sql);
try{
try {
Statement stmt = conn.createStatement();
ResultSet res = stmt.executeQuery(sql);
List<String> databaseList = new ArrayList<String>();
while(res.next()){
while (res.next()) {
databaseList.add(res.getString(1));
}
res.close();
@ -196,11 +252,11 @@ public class HiveDatabaseServiceImpl implements HiveDatabaseService {
historyRecord.setOp_res(true);
userHistoryLog.addHistotyRecord(historyRecord);
return databaseList;
}catch(Exception e){
} catch (Exception e) {
historyRecord.setOp_res(false);
userHistoryLog.addHistotyRecord(historyRecord);
throw e;
}
}
}
}

View File

@ -1,120 +0,0 @@
package org.hiveadmin.hive.service.impl;
import java.sql.Statement;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.apache.log4j.Logger;
import org.hiveadmin.hive.beans.Columns;
import org.hiveadmin.hive.hiveutils.HiveConnectionBean;
/**
 * HiveIndexServiceImpl builds and executes HiveQL DDL statements for
 * creating and dropping indexes. Statements are executed through the shared
 * {@link HiveConnectionBean} JDBC statement.
 */
public class HiveIndexServiceImpl {
	private Logger log = Logger.getLogger(this.getClass());

	/**
	 * Renders a Hive property list such as ('k1'='v1', 'k2'='v2').
	 * Columns.cols_name is used as the property key and Columns.cols_type as
	 * the property value.
	 *
	 * @param entries
	 *            the key/value pairs to render; expected non-empty
	 * @return the rendered list, parentheses included
	 */
	private static String propertyList(List<Columns> entries) {
		StringBuilder buf = new StringBuilder("(");
		Iterator<Columns> it = entries.iterator();
		while (it.hasNext()) {
			Columns entry = it.next();
			buf.append("'").append(entry.getCols_name()).append("'='")
					.append(entry.getCols_type()).append("'");
			if (it.hasNext()) {
				buf.append(", ");
			}
		}
		return buf.append(")").toString();
	}

	/**
	 * Builds and executes a CREATE INDEX statement.
	 *
	 * @param index_name
	 *            the name of the index to create
	 * @param base_table_name
	 *            the table the index is built on
	 * @param column_spec
	 *            the indexed columns; empty means no column list is emitted
	 * @param index_type
	 *            the index handler class (AS '...' clause)
	 * @param with_deferred_rebuild
	 *            when true, appends WITH DEFERRED REBUILD
	 * @param properties_spec
	 *            IDXPROPERTIES key/value pairs; may be empty
	 * @param index_table_name
	 *            optional IN TABLE target; skipped when null/empty
	 * @param row_format
	 *            currently unused — TODO wire up ROW FORMAT support
	 * @param stored_as
	 *            optional STORED AS format; skipped when null/empty
	 * @param stored_by_with
	 *            STORED BY ... clause; not implemented yet
	 * @param hdfs_path
	 *            optional LOCATION; skipped when null/empty
	 * @param tblproperties
	 *            TBLPROPERTIES key/value pairs; may be empty
	 * @param index_comment
	 *            optional trailing comment clause; appended verbatim
	 * @throws Exception
	 *             if the statement cannot be obtained or executed
	 */
	public void createIndex(String index_name, String base_table_name,
			List<String> column_spec, String index_type,
			boolean with_deferred_rebuild, List<Columns> properties_spec,
			String index_table_name, String row_format, String stored_as,
			String stored_by_with, String hdfs_path,
			List<Columns> tblproperties, String index_comment)
			throws Exception {
		StringBuilder sql = new StringBuilder();
		sql.append("create index ").append(index_name).append(" on table ")
				.append(base_table_name).append(" ");
		if (!column_spec.isEmpty()) {
			sql.append("( ");
			Iterator<String> cols = column_spec.iterator();
			while (cols.hasNext()) {
				sql.append(cols.next());
				if (cols.hasNext()) {
					sql.append(" , ");
				}
			}
			sql.append(") ");
		}
		sql.append(" as ").append(index_type).append(" ");
		if (with_deferred_rebuild) {
			// Bug fix: the keyword was previously misspelled "deferres
			// rebuild", which is not valid HiveQL.
			sql.append(" with deferred rebuild ");
		}
		if (!properties_spec.isEmpty()) {
			// Bug fix: the old hand-rolled loop emitted stray quotes such as
			// a='b' ' , c='d' ') — malformed SQL.
			sql.append(" idxproperties ")
					.append(propertyList(properties_spec)).append(" ");
		}
		if (index_table_name != null && !index_table_name.isEmpty()) {
			sql.append(" in table ").append(index_table_name);
		}
		if (stored_as != null && !stored_as.isEmpty()) {
			sql.append(" stored as ").append(stored_as).append(" ");
		} else if (stored_by_with != null) {
			// TODO: STORED BY ... WITH SERDEPROPERTIES is still unimplemented
			// (was marked "未完成" / "unfinished" in the original source).
		}
		if (hdfs_path != null && !hdfs_path.isEmpty()) {
			sql.append(" location ").append(hdfs_path).append(" ");
		}
		if (!tblproperties.isEmpty()) {
			sql.append(" tblproperties ").append(propertyList(tblproperties))
					.append(" ");
		}
		if (index_comment != null && !index_comment.isEmpty()) {
			sql.append(index_comment);
		}
		Statement stmt = HiveConnectionBean.getStmt();
		try {
			stmt.execute(sql.toString());
		} finally {
			// Bug fix: the statement used to leak when execute() threw.
			stmt.close();
		}
		log.info("Success to create index. sql: " + sql);
	}

	/**
	 * Builds and executes a DROP INDEX statement.
	 *
	 * @param if_exists
	 *            when true, adds IF EXISTS so a missing index is not an error
	 * @param index_name
	 *            the name of the index to drop
	 * @param table_name
	 *            the table the index was built on
	 * @throws Exception
	 *             if the statement cannot be obtained or executed
	 */
	public void dropIndex(boolean if_exists, String index_name,
			String table_name) throws Exception {
		String sql = "drop index ";
		if (if_exists) {
			sql = sql + " if exists ";
		}
		sql = sql + index_name + " on " + table_name;
		Statement stmt = HiveConnectionBean.getStmt();
		try {
			stmt.execute(sql);
		} finally {
			stmt.close();
		}
		// Bug fix: the old log message wrongly claimed a table was created.
		log.info("Success to drop index. sql: " + sql);
	}
}

File diff suppressed because it is too large Load Diff

View File

@ -1,140 +0,0 @@
package org.hiveadmin.hive.service.impl;
import java.sql.Statement;
import java.util.List;
import java.util.Map;
import org.apache.log4j.Logger;
import java.util.Map.Entry;
import org.hiveadmin.hive.beans.Columns;
import org.hiveadmin.hive.hiveutils.HiveConnectionBean;
import java.util.Iterator;
/**
 * HiveViewServiceImpl builds and executes HiveQL DDL statements for
 * creating, dropping and altering views. Statements are executed through the
 * shared {@link HiveConnectionBean} JDBC statement.
 */
public class HiveViewServiceImpl {
	private Logger log = Logger.getLogger(this.getClass());

	/**
	 * Renders a Hive property list such as ('k1'='v1', 'k2'='v2').
	 * Columns.cols_name is used as the property key and Columns.cols_type as
	 * the property value.
	 *
	 * @param entries
	 *            the key/value pairs to render; expected non-empty
	 * @return the rendered list, parentheses included
	 */
	private static String propertyList(List<Columns> entries) {
		StringBuilder buf = new StringBuilder("(");
		Iterator<Columns> it = entries.iterator();
		while (it.hasNext()) {
			Columns entry = it.next();
			buf.append("'").append(entry.getCols_name()).append("'='")
					.append(entry.getCols_type()).append("'");
			if (it.hasNext()) {
				buf.append(", ");
			}
		}
		return buf.append(")").toString();
	}

	/**
	 * Builds and executes a CREATE VIEW statement.
	 *
	 * @param if_not_exists
	 *            when true, adds IF NOT EXISTS
	 * @param view_name
	 *            the name of the view to create
	 * @param columns
	 *            column name/comment pairs (Columns.cols_type carries the
	 *            per-column comment here); may be empty
	 * @param comment
	 *            optional view-level comment; skipped when null/empty
	 * @param tblproperties_spec
	 *            TBLPROPERTIES key/value pairs; may be empty
	 * @param select_sentance
	 *            the SELECT statement the view is defined as
	 * @throws Exception
	 *             if the statement cannot be obtained or executed
	 */
	public void createView(boolean if_not_exists, String view_name,
			List<Columns> columns, String comment,
			List<Columns> tblproperties_spec, String select_sentance)
			throws Exception {
		StringBuilder sql = new StringBuilder("create view ");
		if (if_not_exists) {
			sql.append(" if not exists ");
		}
		sql.append(view_name);
		if (!columns.isEmpty()) {
			sql.append(" ( ");
			Iterator<Columns> it = columns.iterator();
			while (it.hasNext()) {
				Columns col = it.next();
				// Bug fix: the old loop emitted stray quotes such as
				// a comment 'x' ' , b comment 'y' ') — malformed SQL.
				sql.append(col.getCols_name()).append(" comment '")
						.append(col.getCols_type()).append("'");
				if (it.hasNext()) {
					sql.append(" , ");
				}
			}
			sql.append(" ) ");
		}
		if (comment != null && !comment.isEmpty()) {
			sql.append(" comment '").append(comment).append("' ");
		}
		if (!tblproperties_spec.isEmpty()) {
			sql.append(" TBLPROPERTIES ")
					.append(propertyList(tblproperties_spec)).append(" ");
		}
		sql.append(select_sentance);
		Statement stmt = HiveConnectionBean.getStmt();
		try {
			stmt.execute(sql.toString());
		} finally {
			// Bug fix: the statement used to leak when execute() threw.
			stmt.close();
		}
		log.info("Success to create view. sql: " + sql);
	}

	/**
	 * Builds and executes a DROP VIEW statement.
	 *
	 * @param if_exists
	 *            when true, adds IF EXISTS so a missing view is not an error
	 * @param view_name
	 *            the name of the view to drop
	 * @throws Exception
	 *             if the statement cannot be obtained or executed
	 */
	public void dropView(boolean if_exists, String view_name) throws Exception {
		String sql = "drop view ";
		if (if_exists) {
			sql = sql + " if exists ";
		}
		sql = sql + view_name;
		Statement stmt = HiveConnectionBean.getStmt();
		try {
			stmt.execute(sql);
		} finally {
			stmt.close();
		}
		// Bug fix: the old log message wrongly claimed a table was created.
		log.info("Success to drop view. sql: " + sql);
	}

	/**
	 * Builds and executes an ALTER VIEW ... SET TBLPROPERTIES statement.
	 *
	 * @param view_name
	 *            the name of the view to alter
	 * @param table_properties
	 *            TBLPROPERTIES key/value pairs to set
	 * @throws Exception
	 *             if the statement cannot be obtained or executed
	 */
	public void alterViewProperties(String view_name,
			List<Columns> table_properties) throws Exception {
		String sql = "alter view " + view_name + " set tblproperties "
				+ propertyList(table_properties);
		Statement stmt = HiveConnectionBean.getStmt();
		try {
			stmt.execute(sql);
		} finally {
			stmt.close();
		}
		// Bug fix: the old code threw an Exception even after a successful
		// execution ("Sucess to excute"), and also treated execute()
		// returning false (normal for DDL) as a failure.
		log.info("Success to alter view properties. sql: " + sql);
	}

	/**
	 * Builds and executes an ALTER VIEW ... AS SELECT statement, redefining
	 * the view with a new SELECT.
	 *
	 * @param view_name
	 *            the name of the view to redefine
	 * @param select_sentance
	 *            the new SELECT statement
	 * @throws Exception
	 *             if the statement cannot be obtained or executed
	 */
	public void alterViewAsSelect(String view_name, String select_sentance)
			throws Exception {
		String sql = "alter view " + view_name + " as " + select_sentance;
		Statement stmt = HiveConnectionBean.getStmt();
		try {
			stmt.execute(sql);
		} finally {
			stmt.close();
		}
		// Bug fix: the old log message wrongly claimed a table was created.
		log.info("Success to alter view. sql: " + sql);
	}
}