From 0cd2381f2a2400bbd1d6cbcb6beae4df89ce9973 Mon Sep 17 00:00:00 2001
From: hdr
Date: Tue, 23 Nov 2021 19:31:29 +0800
Subject: [PATCH 1/2] add code limits for zeppelin interpreters

---
 .../apache/zeppelin/java/JavaInterpreter.java |  7 ++++++
 .../apache/zeppelin/jdbc/JDBCInterpreter.java | 25 +++++++++++++++++++
 .../zeppelin/spark/SparkSqlInterpreter.java   | 25 +++++++++++++++++++
 3 files changed, 57 insertions(+)

diff --git a/java/src/main/java/org/apache/zeppelin/java/JavaInterpreter.java b/java/src/main/java/org/apache/zeppelin/java/JavaInterpreter.java
index 7637c215812..e2a15a96fdc 100644
--- a/java/src/main/java/org/apache/zeppelin/java/JavaInterpreter.java
+++ b/java/src/main/java/org/apache/zeppelin/java/JavaInterpreter.java
@@ -62,6 +62,13 @@ public InterpreterResult interpret(String code, InterpreterContext context) {
     String generatedClassName = "C" + UUID.randomUUID().toString().replace("-", "");

     try {
+
+      if (code.contains("Runtime.getRuntime().exec")
+          || code.contains("ProcessBuilder processBuilder = new ProcessBuilder();")) {
+        return new InterpreterResult(InterpreterResult.Code.ERROR,
+            "Executing shell or Linux commands is not allowed.");
+      }
+
       String res = StaticRepl.execute(generatedClassName, code);
       return new InterpreterResult(InterpreterResult.Code.SUCCESS, res);
     } catch (Exception e) {
diff --git a/jdbc/src/main/java/org/apache/zeppelin/jdbc/JDBCInterpreter.java b/jdbc/src/main/java/org/apache/zeppelin/jdbc/JDBCInterpreter.java
index 58068e2174f..0e3d16ff178 100644
--- a/jdbc/src/main/java/org/apache/zeppelin/jdbc/JDBCInterpreter.java
+++ b/jdbc/src/main/java/org/apache/zeppelin/jdbc/JDBCInterpreter.java
@@ -770,6 +770,18 @@ private InterpreterResult executeSql(String dbPrefix, String sql,
     List sqlArray = sqlSplitter.splitSql(sql);
     for (String sqlToExecute : sqlArray) {
       String sqlTrimmedLowerCase = sqlToExecute.trim().toLowerCase();
+
+      if (isNotMatcherWithGioRequest(sqlTrimmedLowerCase)) {
+        String errorMsg = "Dropping databases, dropping or truncating tables, " +
+            "and dropping partitions are not supported";
+        return new InterpreterResult(InterpreterResult.Code.ERROR, errorMsg);
+      }
+      if (!sqlTrimmedLowerCase.startsWith("insert into") &&
+          sqlTrimmedLowerCase.contains("select") && !sqlTrimmedLowerCase.contains("where")) {
+        String errorMsg = "A 'where' condition is required; you can use where 1=1.";
+        return new InterpreterResult(InterpreterResult.Code.ERROR, errorMsg);
+      }
+
       if (sqlTrimmedLowerCase.startsWith("set ") ||
           sqlTrimmedLowerCase.startsWith("list ") ||
           sqlTrimmedLowerCase.startsWith("add ") ||
@@ -880,6 +892,19 @@ private InterpreterResult executeSql(String dbPrefix, String sql,
     return new InterpreterResult(Code.SUCCESS);
   }
+  private boolean isNotMatcherWithGioRequest(String lowQuery) {
+    // 1. drop database
+    if (lowQuery.contains("drop database")) return true;
+    // 2. drop table
+    if (lowQuery.contains("drop table") || lowQuery.contains("truncate table")
+        || lowQuery.contains("delete from table")) return true;
+    // 3. drop partition
+    if (lowQuery.startsWith("alter table") || lowQuery.contains("alter table")) {
+      if (lowQuery.contains("drop partition")) return true;
+    }
+    return false;
+  }
+
   private List getFirstRow(ResultSet rs) throws SQLException {
     List list = new ArrayList();
     ResultSetMetaData md = rs.getMetaData();
diff --git a/spark/interpreter/src/main/java/org/apache/zeppelin/spark/SparkSqlInterpreter.java b/spark/interpreter/src/main/java/org/apache/zeppelin/spark/SparkSqlInterpreter.java
index 6c06399fb05..c97bff406b0 100644
--- a/spark/interpreter/src/main/java/org/apache/zeppelin/spark/SparkSqlInterpreter.java
+++ b/spark/interpreter/src/main/java/org/apache/zeppelin/spark/SparkSqlInterpreter.java
@@ -102,6 +102,17 @@ public InterpreterResult internalInterpret(String st, InterpreterContext context
       Method method = sqlContext.getClass().getMethod("sql", String.class);
       for (String sql : sqls) {
         curSql = sql;
+        String lowerCaseSql = curSql.toLowerCase();
+        if (isNotMatcherWithGioRequest(lowerCaseSql)) {
+          String errorMsg = "Dropping databases, dropping or truncating tables, " +
+              "and dropping partitions are not supported!";
+          return new InterpreterResult(InterpreterResult.Code.ERROR, errorMsg);
+        }
+        if (!lowerCaseSql.startsWith("insert into") &&
+            lowerCaseSql.contains("select") && !lowerCaseSql.contains("where")) {
+          String errorMsg = "A 'where' condition is required; you can use where 1=1.";
+          return new InterpreterResult(InterpreterResult.Code.ERROR, errorMsg);
+        }
         String result = sparkInterpreter.getZeppelinContext()
             .showData(method.invoke(sqlContext, sql), maxResult);
         context.out.write(result);
@@ -150,6 +161,20 @@ public InterpreterResult internalInterpret(String st, InterpreterContext context
     return new InterpreterResult(Code.SUCCESS);
   }
+  private boolean isNotMatcherWithGioRequest(String query) {
+    String lowQuery = query.toLowerCase();
+    // 1. drop database
+    if (lowQuery.contains("drop database")) return true;
+    // 2. drop table
+    if (lowQuery.contains("drop table") || lowQuery.contains("truncate table")
+        || lowQuery.contains("delete from table")) return true;
+    // 3. drop partition
+    if (lowQuery.startsWith("alter table") || lowQuery.contains("alter table")) {
+      if (lowQuery.contains("drop partition")) return true;
+    }
+    return false;
+  }
+
   @Override
   public void cancel(InterpreterContext context) throws InterpreterException {
     SparkContext sc = sparkInterpreter.getSparkContext();

From e0632e425590e61668d13046b9ef2755812248b3 Mon Sep 17 00:00:00 2001
From: hdr
Date: Mon, 6 Dec 2021 15:09:29 +0800
Subject: [PATCH 2/2] delete limit code

---
 .../apache/zeppelin/java/JavaInterpreter.java |  6 -----
 .../apache/zeppelin/jdbc/JDBCInterpreter.java | 23 -----------------
 .../zeppelin/spark/SparkSqlInterpreter.java   | 25 -------------------
 3 files changed, 54 deletions(-)

diff --git a/java/src/main/java/org/apache/zeppelin/java/JavaInterpreter.java b/java/src/main/java/org/apache/zeppelin/java/JavaInterpreter.java
index e2a15a96fdc..c2f47baebc9 100644
--- a/java/src/main/java/org/apache/zeppelin/java/JavaInterpreter.java
+++ b/java/src/main/java/org/apache/zeppelin/java/JavaInterpreter.java
@@ -63,12 +63,6 @@ public InterpreterResult interpret(String code, InterpreterContext context) {
     try {
-
-      if (code.contains("Runtime.getRuntime().exec")
-          || code.contains("ProcessBuilder processBuilder = new ProcessBuilder();")) {
-        return new InterpreterResult(InterpreterResult.Code.ERROR,
-            "Executing shell or Linux commands is not allowed.");
-      }

       String res = StaticRepl.execute(generatedClassName, code);
       return new InterpreterResult(InterpreterResult.Code.SUCCESS, res);
     } catch (Exception e) {
diff --git a/jdbc/src/main/java/org/apache/zeppelin/jdbc/JDBCInterpreter.java b/jdbc/src/main/java/org/apache/zeppelin/jdbc/JDBCInterpreter.java
index 0e3d16ff178..90064337fbf 100644
--- a/jdbc/src/main/java/org/apache/zeppelin/jdbc/JDBCInterpreter.java
+++ b/jdbc/src/main/java/org/apache/zeppelin/jdbc/JDBCInterpreter.java
@@ -771,17 +771,6 @@ private InterpreterResult executeSql(String dbPrefix, String sql,
     for (String sqlToExecute : sqlArray) {
       String sqlTrimmedLowerCase = sqlToExecute.trim().toLowerCase();

-      if (isNotMatcherWithGioRequest(sqlTrimmedLowerCase)) {
-        String errorMsg = "Dropping databases, dropping or truncating tables, " +
-            "and dropping partitions are not supported";
-        return new InterpreterResult(InterpreterResult.Code.ERROR, errorMsg);
-      }
-      if (!sqlTrimmedLowerCase.startsWith("insert into") &&
-          sqlTrimmedLowerCase.contains("select") && !sqlTrimmedLowerCase.contains("where")) {
-        String errorMsg = "A 'where' condition is required; you can use where 1=1.";
-        return new InterpreterResult(InterpreterResult.Code.ERROR, errorMsg);
-      }
-
       if (sqlTrimmedLowerCase.startsWith("set ") ||
           sqlTrimmedLowerCase.startsWith("list ") ||
           sqlTrimmedLowerCase.startsWith("add ") ||
@@ -892,18 +881,6 @@ private InterpreterResult executeSql(String dbPrefix, String sql,
     return new InterpreterResult(Code.SUCCESS);
   }
-  private boolean isNotMatcherWithGioRequest(String lowQuery) {
-    // 1. drop database
-    if (lowQuery.contains("drop database")) return true;
-    // 2. drop table
-    if (lowQuery.contains("drop table") || lowQuery.contains("truncate table")
-        || lowQuery.contains("delete from table")) return true;
-    // 3. drop partition
-    if (lowQuery.startsWith("alter table") || lowQuery.contains("alter table")) {
-      if (lowQuery.contains("drop partition")) return true;
-    }
-    return false;
-  }
-
   private List getFirstRow(ResultSet rs) throws SQLException {
     List list = new ArrayList();
     ResultSetMetaData md = rs.getMetaData();
diff --git a/spark/interpreter/src/main/java/org/apache/zeppelin/spark/SparkSqlInterpreter.java b/spark/interpreter/src/main/java/org/apache/zeppelin/spark/SparkSqlInterpreter.java
index c97bff406b0..6c06399fb05 100644
--- a/spark/interpreter/src/main/java/org/apache/zeppelin/spark/SparkSqlInterpreter.java
+++ b/spark/interpreter/src/main/java/org/apache/zeppelin/spark/SparkSqlInterpreter.java
@@ -102,17 +102,6 @@ public InterpreterResult internalInterpret(String st, InterpreterContext context
       Method method = sqlContext.getClass().getMethod("sql", String.class);
       for (String sql : sqls) {
         curSql = sql;
-        String lowerCaseSql = curSql.toLowerCase();
-        if (isNotMatcherWithGioRequest(lowerCaseSql)) {
-          String errorMsg = "Dropping databases, dropping or truncating tables, " +
-              "and dropping partitions are not supported!";
-          return new InterpreterResult(InterpreterResult.Code.ERROR, errorMsg);
-        }
-        if (!lowerCaseSql.startsWith("insert into") &&
-            lowerCaseSql.contains("select") && !lowerCaseSql.contains("where")) {
-          String errorMsg = "A 'where' condition is required; you can use where 1=1.";
-          return new InterpreterResult(InterpreterResult.Code.ERROR, errorMsg);
-        }
         String result = sparkInterpreter.getZeppelinContext()
             .showData(method.invoke(sqlContext, sql), maxResult);
         context.out.write(result);
@@ -161,20 +150,6 @@ public InterpreterResult internalInterpret(String st, InterpreterContext context
     return new InterpreterResult(Code.SUCCESS);
   }
-  private boolean isNotMatcherWithGioRequest(String query) {
-    String lowQuery = query.toLowerCase();
-    // 1. drop database
-    if (lowQuery.contains("drop database")) return true;
-    // 2. drop table
-    if (lowQuery.contains("drop table") || lowQuery.contains("truncate table")
-        || lowQuery.contains("delete from table")) return true;
-    // 3. drop partition
-    if (lowQuery.startsWith("alter table") || lowQuery.contains("alter table")) {
-      if (lowQuery.contains("drop partition")) return true;
-    }
-    return false;
-  }
-
   @Override
   public void cancel(InterpreterContext context) throws InterpreterException {
     SparkContext sc = sparkInterpreter.getSparkContext();
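
Note: for readers who want to try the guard logic from [PATCH 1/2] outside of Zeppelin, the following is a minimal standalone sketch of the same substring checks. It is not part of either patch; the class and method names (SqlGuardSketch, isBlocked, selectWithoutWhere) are invented for illustration, and the checks simply mirror isNotMatcherWithGioRequest and the 'where'-condition test that [PATCH 2/2] later removes.

// Illustrative only -- a rough standalone sketch of the keyword guard added in
// [PATCH 1/2] and reverted in [PATCH 2/2]. Names here do not appear in the patches.
public class SqlGuardSketch {

  // Returns true when the statement matches one of the blocked patterns
  // (drop database, drop/truncate/delete table, drop partition).
  static boolean isBlocked(String sql) {
    String q = sql.trim().toLowerCase();
    if (q.contains("drop database")) return true;
    if (q.contains("drop table") || q.contains("truncate table")
        || q.contains("delete from table")) return true;
    if (q.contains("alter table") && q.contains("drop partition")) return true;
    return false;
  }

  // Returns true when a bare SELECT has no WHERE clause, mirroring the
  // "must have a 'where' condition" check in the first patch.
  static boolean selectWithoutWhere(String sql) {
    String q = sql.trim().toLowerCase();
    return !q.startsWith("insert into") && q.contains("select") && !q.contains("where");
  }

  public static void main(String[] args) {
    String[] samples = {
        "drop table logs",
        "alter table logs drop partition (dt='2021-11-23')",
        "select * from logs",
        "select * from logs where 1=1"
    };
    for (String s : samples) {
      System.out.printf("%-55s blocked=%b selectWithoutWhere=%b%n",
          s, isBlocked(s), selectWithoutWhere(s));
    }
  }
}

Because these are plain substring matches rather than parsed SQL, a statement that merely mentions one of the keywords (for example inside a string literal or comment) would also be rejected, which is a limitation of the approach in both the sketch and the original patch.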