Commit a844c69f (a844c69f6fc8b446255097556682c6e05b2b6ef8) by chengyun

initialize

0 parents
Showing 97 changed files with 1959 additions and 0 deletions
1 # Java-related
2 libs/*
3 *.iml
4 *.stackdump
5 *.orig
6 .idea
7 .settings
8 .classpath
9 .project
10 */src/main/webapp/build/*
11 # */src/main/webapp/ext/*
12 bootstrap.*
13 */target
14 */*/target
15 */*.zip
16 */*.tar
17 *.m3u8
1 /prepared/m3u8/20180608/drama/4730_1.m3u8
2 /prepared/m3u8/20180608/drama/4946_1.m3u8
3 /prepared/m3u8/20180608/drama/4734_1.m3u8
4 /prepared/m3u8/20180608/drama/4948_1.m3u8
5 /prepared/m3u8/20180608/drama/4760_1.m3u8
6 /prepared/m3u8/20180608/drama/4772_1.m3u8
7 /prepared/m3u8/20180608/drama/4806_1.m3u8
8 /prepared/m3u8/20180608/drama/4976_1.m3u8
1 <?xml version="1.0" encoding="UTF-8"?>
2
3 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
4 xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
5 <modelVersion>4.0.0</modelVersion>
6
7 <groupId>com.topdraw.sohu</groupId>
8 <artifactId>port</artifactId>
9 <version>1.0-SNAPSHOT</version>
10 <packaging>war</packaging>
11
12 <name>port Maven Webapp</name>
13
14 <dependencies>
15 <dependency>
16 <groupId>org.afflatus</groupId>
17 <artifactId>utility</artifactId>
18 <version>1.0.3</version>
19 </dependency>
20 <dependency>
21 <groupId>com.alibaba</groupId>
22 <artifactId>druid</artifactId>
23 <version>1.1.6</version>
24 </dependency>
25 <dependency>
26 <groupId>log4j</groupId>
27 <artifactId>log4j</artifactId>
28 <version>1.2.17</version>
29 </dependency>
30 <dependency>
31 <groupId>org.slf4j</groupId>
32 <artifactId>slf4j-log4j12</artifactId>
33 <version>1.7.21</version>
34 </dependency>
35 <dependency>
36 <groupId>com.alibaba</groupId>
37 <artifactId>fastjson</artifactId>
38 <version>1.2.20</version>
39 </dependency>
40 <dependency>
41 <groupId>mysql</groupId>
42 <artifactId>mysql-connector-java</artifactId>
43 <version>5.1.26</version>
44 </dependency>
45 <dependency>
46 <groupId>com.thoughtworks.xstream</groupId>
47 <artifactId>xstream</artifactId>
48 <version>1.4.7</version>
49 </dependency>
50 <dependency>
51 <groupId>org.quartz-scheduler</groupId>
52 <artifactId>quartz</artifactId>
53 <version>2.2.3</version>
54 </dependency>
55 <dependency>
56 <groupId>org.quartz-scheduler</groupId>
57 <artifactId>quartz-jobs</artifactId>
58 <version>2.2.3</version>
59 </dependency>
60 <dependency>
61 <groupId>javax.transaction</groupId>
62 <artifactId>jta</artifactId>
63 <version>1.1</version>
64 </dependency>
65 <dependency>
66 <groupId>commons-net</groupId>
67 <artifactId>commons-net</artifactId>
68 <version>3.3</version>
69 </dependency>
70 <dependency>
71 <groupId>org.dom4j</groupId>
72 <artifactId>dom4j</artifactId>
73 <version>2.0.0</version>
74 </dependency>
75 </dependencies>
76
77 <build>
78 <finalName>port</finalName>
79
80 <plugins>
81 <plugin>
82 <groupId>org.apache.maven.plugins</groupId>
83 <artifactId>maven-compiler-plugin</artifactId>
84 <version>3.1</version>
85 <configuration>
86 <source>1.8</source>
87 <target>1.8</target>
88 </configuration>
89 </plugin>
90
91 <plugin>
92 <groupId>org.apache.maven.plugins</groupId>
93 <artifactId>maven-dependency-plugin</artifactId>
94 <executions>
95 <execution>
96 <id>copy</id>
97 <phase>package</phase>
98 <goals>
99 <goal>copy-dependencies</goal>
100 </goals>
101 <configuration>
102 <outputDirectory>
103 ./target/lib
104 </outputDirectory>
105 </configuration>
106 </execution>
107 </executions>
108 </plugin>
109 </plugins>
110
111 <pluginManagement><!-- lock down plugins versions to avoid using Maven defaults (may be moved to parent pom) -->
112 <plugins>
113 <plugin>
114 <artifactId>maven-clean-plugin</artifactId>
115 <version>3.0.0</version>
116 </plugin>
117 <!-- see http://maven.apache.org/ref/current/maven-core/default-bindings.html#Plugin_bindings_for_war_packaging -->
118 <plugin>
119 <artifactId>maven-resources-plugin</artifactId>
120 <version>3.0.2</version>
121 </plugin>
122 <plugin>
123 <artifactId>maven-compiler-plugin</artifactId>
124 <version>3.7.0</version>
125 </plugin>
126 <plugin>
127 <artifactId>maven-surefire-plugin</artifactId>
128 <version>2.20.1</version>
129 </plugin>
130 <plugin>
131 <artifactId>maven-war-plugin</artifactId>
132 <version>3.2.0</version>
133 </plugin>
134 <plugin>
135 <artifactId>maven-install-plugin</artifactId>
136 <version>2.5.2</version>
137 </plugin>
138 <plugin>
139 <artifactId>maven-deploy-plugin</artifactId>
140 <version>2.8.2</version>
141 </plugin>
142 </plugins>
143 </pluginManagement>
144 </build>
145 </project>
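With this build section, running mvn package produces port.war (finalName is port) and, through the copy-dependencies execution bound to the package phase, also copies every dependency jar into ./target/lib.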
1 package com.topdraw.sohu.listener;
2
3 import com.mysql.jdbc.AbandonedConnectionCleanupThread;
4 import com.topdraw.sohu.utils.PropertiesUtil;
5 import org.afflatus.utility.DruidUtil;
6 import org.afflatus.utility.MemcachedClient;
7 import org.quartz.Scheduler;
8 import org.quartz.SchedulerException;
9 import org.quartz.SchedulerFactory;
10 import org.quartz.impl.StdSchedulerFactory;
11 import org.slf4j.Logger;
12 import org.slf4j.LoggerFactory;
13
14 import javax.servlet.ServletContextEvent;
15 import javax.servlet.ServletContextListener;
16 import javax.servlet.annotation.WebListener;
17 import java.sql.Connection;
18 import java.sql.Driver;
19 import java.sql.DriverManager;
20 import java.util.Enumeration;
21
22 @WebListener
23 public class ServerListener implements ServletContextListener {
24 private static Logger logger;
25 private static final String CATALINA_BASE = "catalina.base";
26 private static final String JETTY_HOME = "jetty.home";
27 private static final String PROJECT_NAME = "project.name";
28 private static final String PROJECT_VERSION = "project.version";
29
30 public static Scheduler scheduler = null;
31
32 public static Boolean bStarted = true;
33
34 @Override
35 public void contextDestroyed(ServletContextEvent event) {
36 logger.debug("contextDestroyed");
37 if (null != scheduler) {
38 try {
39 // scheduler.pauseAll();
40 bStarted = false;
41 scheduler.shutdown(true);
42 } catch (SchedulerException e) {
43 logger.error("scheduler shutdown error", e);
44 }
45 }
46 // try {
47 // Thread.sleep(2000);
48 // } catch (InterruptedException e) {
49 // e.printStackTrace();
50 // }
51 try {
52 Enumeration<Driver> drivers = DriverManager.getDrivers();
53 while (drivers.hasMoreElements()) {
54 Driver driver = drivers.nextElement();
55 DriverManager.deregisterDriver(driver);
56 }
57 AbandonedConnectionCleanupThread.shutdown();
58 DruidUtil.closeAllDataSource();
59 MemcachedClient.shutdown();
60 } catch (Exception ex) {
61 ex.printStackTrace();
62 }
63 }
64
65 @Override
66 public void contextInitialized(ServletContextEvent event) {
67 if (null == System.getProperty(CATALINA_BASE)) {
68 String home = System.getProperty(JETTY_HOME);
69 if (null != home) {
70 System.setProperty(CATALINA_BASE, home);
71 } else {
72 System.setProperty(CATALINA_BASE, ".");
73 }
74 }
75
76 System.setProperty(PROJECT_NAME, PropertiesUtil.get(PROJECT_NAME));
77 System.setProperty("druid.registerToSysProperty", "true");
78
79 logger = LoggerFactory.getLogger(ServerListener.class);
80
81 logger.debug("contextInitialized [name:"
82 + PropertiesUtil.get(PROJECT_NAME) + ", version:"
83 + PropertiesUtil.get(PROJECT_VERSION) + "]");
84
85 Connection readConnection = null;
86 Connection writeConnection = null;
87 try {
88 readConnection = DruidUtil.getRandomReadConnection();
89 writeConnection = DruidUtil.getRandomWriteConnection();
90 } catch (Exception ex) {
91 logger.error("Connection init error", ex);
92 } finally {
93 if (null != readConnection) {
94 DruidUtil.close(readConnection);
95 }
96 if (null != writeConnection) {
97 DruidUtil.close(writeConnection);
98 }
99 }
100
101 try {
102 SchedulerFactory sf = new StdSchedulerFactory("quartz.properties");
103 scheduler = sf.getScheduler();
104 scheduler.start();
105 } catch (Exception e) {
106 logger.error("start quartz error", e);
107 }
108
109 }
110 }
1 package com.topdraw.sohu.port.job;
2
3 import com.topdraw.sohu.listener.ServerListener;
4 import com.topdraw.sohu.utils.SohuUtil;
5 import org.afflatus.utility.DruidUtil;
6 import org.quartz.*;
7 import org.slf4j.Logger;
8 import org.slf4j.LoggerFactory;
9
10 import java.sql.Connection;
11 import java.util.ArrayList;
12 import java.util.List;
13 import java.util.Map;
14
15 @DisallowConcurrentExecution
16 public class M3U8Job implements Job {
17 private static final Logger log = LoggerFactory.getLogger(M3U8Job.class);
18
19 private List<Map<String, Object>> listCmd = new ArrayList<>();
20
21 public void execute(JobExecutionContext context) throws JobExecutionException {
22 Connection writeConnection = null;
23
24 try {
25 writeConnection = DruidUtil.getRandomWriteConnection();
26 log.info("M3U8Job start");
27 listCmd = getTodoCmd();
28
29 if (listCmd.size() == 0) {
30 log.info("no cmd to download m3u8");
31 return;
32 } else {
33 if (null != ServerListener.scheduler) {
34 ServerListener.scheduler.pauseJob(context.getJobDetail().getKey());
35 }
36 }
37 while (listCmd.size() != 0) {
38 if (!ServerListener.bStarted) {
39 // check whether Tomcat is shutting down
40 log.info("tomcat is shutting down, breaking out of the executing job");
41 break;
42 }
43 for (Map<String, Object> mapCmd : listCmd) {
44 SohuUtil util = new SohuUtil();
45 util.downloadM3U8ByVideoId((Long) mapCmd.get("video_id"));
46 }
47 listCmd = getTodoCmd();
48 }
49 if (null != ServerListener.scheduler) {
50 ServerListener.scheduler.resumeJob(context.getJobDetail().getKey());
51 }
52 } catch (Exception e) {
53 log.error("M3U8Job error", e);
54 if (null != ServerListener.scheduler) {
55 try {
56 ServerListener.scheduler.resumeJob(context.getJobDetail().getKey());
57 } catch (SchedulerException se) {
59 se.printStackTrace();
60 }
61 }
62 } finally {
63 DruidUtil.close(writeConnection);
64 }
65 }
66
67 private static List<Map<String, Object>> getTodoCmd() {
68 log.info("start get cmd to do...");
69 Connection readConnection = null;
70 List<Map<String, Object>> listRet = new ArrayList<>();
71 try {
72 readConnection = DruidUtil.getRandomReadConnection();
73 String sql = "SELECT * FROM sohu_video WHERE" +
74 //" tvPlayType <> 1 AND" +
75 " m3u8_path IS NULL" +
76 " AND definition IS NOT NULL LIMIT 0, 20";
77 listRet = DruidUtil.queryList(readConnection, sql);
78 } catch (Exception ex) {
79 ex.printStackTrace();
80 } finally {
81 DruidUtil.close(readConnection);
82 }
83 return listRet;
84 }
85 }
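One caveat in the loop above: the (Long) cast on mapCmd.get("video_id") assumes DruidUtil.queryList materializes the column as a Long; if sohu_video.video_id is an INT column, the driver will typically return an Integer and the cast throws ClassCastException. A defensive helper, as a sketch (hypothetical, not part of the original class):

    // Hypothetical helper: coerce the video_id column to long regardless of
    // whether the driver materializes it as Integer, Long or BigInteger.
    private static long toVideoId(Map<String, Object> mapCmd) {
        Object raw = mapCmd.get("video_id");
        if (raw instanceof Number) {
            return ((Number) raw).longValue();
        }
        return Long.parseLong(String.valueOf(raw));
    }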
1 package com.topdraw.sohu.port.job;
2
3 import com.topdraw.sohu.listener.ServerListener;
4 import com.topdraw.sohu.utils.SohuUtil;
5 import org.quartz.*;
6 import org.slf4j.Logger;
7 import org.slf4j.LoggerFactory;
8
9 @DisallowConcurrentExecution
10 public class MetaDataJob implements Job {
11 private static final Logger log = LoggerFactory.getLogger(MetaDataJob.class);
12
13 public void execute(JobExecutionContext context) throws JobExecutionException {
14
15 try {
16 log.info("MetaDataJob start");
17 ServerListener.scheduler.pauseJob(context.getJobDetail().getKey());
18
19 SohuUtil util = new SohuUtil();
20 util.metaDataIncrementInjection();
21
22 //for (int i = 9; i <= 9; i++) {
23 // util.metaDataIncrementInjectionByXml("2018070" + i);
24 //}
25
26 if (null != ServerListener.scheduler) {
27 ServerListener.scheduler.resumeJob(context.getJobDetail().getKey());
28 }
29 } catch (Exception e) {
30 log.error("MetaDataJob error", e);
31 if (null != ServerListener.scheduler) {
32 try {
33 ServerListener.scheduler.resumeJob(context.getJobDetail().getKey());
34 } catch (SchedulerException se) {
36 se.printStackTrace();
37 }
38 }
39 }
40 }
41 }
1 package com.topdraw.sohu.utils;
2
3 import java.sql.Connection;
4 import java.util.HashMap;
5 import java.util.Map;
6
7 import org.afflatus.utility.DruidUtil;
8
9 import com.alibaba.fastjson.JSON;
10
11 public class BIInterfaceUtil {
12 private static final String BI_SWITCH = PropertiesUtil.get("BI_SWITCH");
13
14 public static void createMediaAssetsPublish(String strAction, String strPlatform, String strType, String strName,
15 Map<String, Object> mapData, Connection writeConnection) {
16 try {
17 if ("off".equals(BI_SWITCH)) { // null-safe: BI_SWITCH may be unset in app.properties
18 return;
19 }
20 Map<String, Object> mapSave = new HashMap<String, Object>();
21 mapSave.put("action", strAction);
22 mapSave.put("platform", strPlatform);
23 mapSave.put("type", strType);
24 mapSave.put("name", strName);
25 if (mapData.containsKey("create_time")) {
26 mapData.remove("create_time");
27 }
28 if (mapData.containsKey("update_time")) {
29 mapData.remove("update_time");
30 }
31 if (JSON.toJSONString(mapData).length() > 4096) {
32 if (mapData.containsKey("image")) {
33 mapData.remove("image");
34 }
35 if (mapData.containsKey("images")) {
36 mapData.remove("images");
37 }
38 }
39 mapSave.put("data", JSON.toJSONString(mapData));
40 DruidUtil.save(writeConnection, mapSave, "x_media_assets_publish");
41 } catch (Exception ex) {
42 ex.printStackTrace();
43 } finally {
44
45 }
46 }
47 }
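A minimal usage sketch, assuming the caller manages its own DruidUtil write connection (the field values below are illustrative only):

    // Hypothetical caller: record a "create" publish event for a drama asset.
    Connection conn = DruidUtil.getRandomWriteConnection();
    try {
        Map<String, Object> data = new HashMap<>();
        data.put("id", 4730L);          // illustrative asset id
        data.put("name", "sample drama");
        BIInterfaceUtil.createMediaAssetsPublish("create", "sohu", "drama", "sample drama", data, conn);
    } finally {
        DruidUtil.close(conn);
    }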
1 package com.topdraw.sohu.utils;
2
3 import org.apache.commons.net.ftp.FTPClient;
4 import org.apache.commons.net.ftp.FTPFile;
5 import org.apache.commons.net.ftp.FTPReply;
6 import org.slf4j.Logger;
7 import org.slf4j.LoggerFactory;
8
9 import java.io.*;
10
11 public class FTPUtils {
12 private static final Logger logger = LoggerFactory
13 .getLogger(FTPUtils.class);
14
15 private final String PROJECT_PATH = this.getClass().getClassLoader().getResource("../../") == null ?
16 (System.getProperty("user.dir").endsWith("/") ? System.getProperty("user.dir")
17 : System.getProperty("user.dir") + "/")
18 : this.getClass().getClassLoader().getResource("../../").getPath();
19
20 /**
21 * Description: upload a file to an FTP server
22 * @param url FTP server hostname
23 * @param port FTP server port
24 * @param username FTP login account
25 * @param password FTP login password
26 * @param path target directory on the FTP server
27 * @param filename name to store the file under on the FTP server
28 * @param input input stream to upload
29 * @return true on success, false otherwise
30 */
31 public boolean uploadFile(String url, int port, String username, String password, String path, String filename, InputStream input) {
32 boolean success = false;
33 FTPClient ftp = new FTPClient();
34 try {
35 int reply;
36 ftp.connect(url, port); // connect to the FTP server
37 // with the default port, ftp.connect(url) can be used to connect directly
38 ftp.login(username, password); // log in
39 reply = ftp.getReplyCode();
40 System.out.println("ReplyCode: " + reply);
41 if (!FTPReply.isPositiveCompletion(reply)) {
42 ftp.disconnect();
43 return success;
44 }
45 ftp.changeWorkingDirectory(path);
46 ftp.storeFile(filename, input);
47
48 input.close();
49 ftp.logout();
50 success = true;
51 } catch (IOException e) {
52 e.printStackTrace();
53 } finally {
54 if (ftp.isConnected()) {
55 try {
56 ftp.disconnect();
57 } catch (IOException ioe) { // ignore failures while disconnecting
58 }
59 }
60 }
61 return success;
62 }
63
64 /**
65 * Description: download a file from an FTP server
66 * @param url FTP server hostname
67 * @param port FTP server port
68 * @param username FTP login account
69 * @param password FTP login password
70 * @param remotePath relative path on the FTP server
71 * @param fileName name of the file to download
72 * @param localPath local directory to save the download into
73 * @return true on success, false otherwise
74 */
75 public static boolean downFile(
76 String url, int port, String username, String password, String remotePath, String fileName,
77 String localPath) {
78 boolean success = false;
79 FTPClient ftp = new FTPClient();
80 try {
81 int reply;
82 ftp.setConnectTimeout(10 * 1000);
83 ftp.connect(url, port);
84 // with the default port, ftp.connect(url) can be used to connect directly
85 ftp.login(username, password); // log in
86 ftp.setBufferSize(102400);
87 ftp.setFileType(FTPClient.BINARY_FILE_TYPE);
88 reply = ftp.getReplyCode();
89 System.out.println("ReplyCode: " + reply);
90 if (!FTPReply.isPositiveCompletion(reply)) {
91 logger.info("FTP reply is not a positive completion: " + reply);
92 ftp.disconnect();
93 return success;
94 }
95 logger.info("remotePath: " + remotePath);
96 ftp.changeWorkingDirectory(remotePath); // change to the target directory on the FTP server
97 ftp.enterLocalPassiveMode();
98 FTPFile[] fs = ftp.listFiles();
99 for (FTPFile ff:fs) {
100 if (ff.getName().equals(fileName)) {
101 File localFile = new File(localPath + "/" + ff.getName());
102 logger.info("file size: " + ff.getSize());
103
104 OutputStream os = new FileOutputStream(localFile);
105 ftp.retrieveFile(ff.getName(), os);
106 os.close();
107 logger.info("download file size: " + localFile.length());
108 if (ff.getSize() == localFile.length()) {
109 success = true;
110 } else {
111 logger.info("file size mismatch: " + ff.getSize() + " -> " + localPath.length());
112 }
113 break;
114 }
115 }
116 ftp.logout();
117 } catch (IOException e) {
118 e.printStackTrace();
119 success = false;
120 } finally {
121 if (ftp.isConnected()) {
122 try {
123 ftp.disconnect();
124 } catch (IOException ioe) {
125 ioe.printStackTrace();
126 }
127 }
128 }
129 return success;
130 }
131
132 public String downFile(String strFtpPath, String strRelativePath) {
133 String strUrl = "", strUserName = "", strPassWord = "", strRemotePath = "", strFileName = "", strLocalPath = "";
134 int iPort = 21;
135 // parse the FTP address
136 final String strFtpFlag = "ftp://";
137 if (strFtpPath != null && strFtpPath.length() > 0 && strFtpPath.toLowerCase().contains(strFtpFlag)) {
138 // first strip the ftp:// prefix
139 final String cutedFtp = strFtpPath.substring(strFtpPath.indexOf(strFtpFlag) + strFtpFlag.length());
140 // take the username/password/ip/port segment (everything before the first "/")
141 String str4 = "";
142 if (cutedFtp.indexOf("/") != -1) {
143 str4 = cutedFtp.substring(0, cutedFtp.indexOf("/"));
144 } else {
145 str4 = cutedFtp;
146 }
147 // split out username and password
148 String strUsernameAndPwd = str4.substring(0, str4.indexOf("@"));
149
150 // split out ip and port
151 String strIpAndPort = str4.substring(str4.indexOf("@") + 1);
152
153 // extract ip and port
154 if (!"".equals(strIpAndPort)) {
155 if (strIpAndPort.indexOf(":") != -1) {
156 strUrl = strIpAndPort.substring(0, strIpAndPort.indexOf(":"));
157 String strPort = strIpAndPort.substring(strIpAndPort.indexOf(":") + 1, strIpAndPort.length());
158 if (strPort != null) {
159 iPort = Integer.parseInt(strPort);
160 }
161 } else {
162 // no port given, take the whole segment as the IP
163 strUrl = strIpAndPort;
164 }
165 }
166
167 // extract username and password
168 if (!"".equals(strUsernameAndPwd)) {
169 strUserName = strUsernameAndPwd.substring(0, strUsernameAndPwd.indexOf(":"));
170 strPassWord = strUsernameAndPwd.substring(strUsernameAndPwd.indexOf(":") + 1);
171 }
172
173 // split the remote file path and the file name
174 String strFileNameAndPath = "";
175 if (cutedFtp.indexOf("/") != -1) {
176 strFileNameAndPath = cutedFtp.substring(cutedFtp.indexOf("/") + 1, cutedFtp.length());
177 } else {
178 strFileNameAndPath = "";
179 }
180 // extract the remote file path and the file name
181 if (!"".equals(strIpAndPort)) {
182 if (strFileNameAndPath.indexOf("/") != -1) {
183 strRemotePath = strFileNameAndPath.substring(0, strFileNameAndPath.lastIndexOf("/"));
184 strFileName = strFileNameAndPath.substring(strFileNameAndPath.lastIndexOf("/") + 1,
185 strFileNameAndPath.length());
186 } else {
187 strFileName = strFileNameAndPath;
188 }
189 }
190 }
191
192 strLocalPath = PROJECT_PATH + strRelativePath + strFileName;
193 logger.info("file download target path: " + strLocalPath);
194 File file = new File(strLocalPath);
195 if (!file.isDirectory()) { // directory does not exist yet
196 String[] aPathSegments = strLocalPath.split("/");
197 String strWalkThroughPath = "/";
198 for (int i = 0; i < aPathSegments.length - 1; i++) {
199 strWalkThroughPath = strWalkThroughPath + "/" + aPathSegments[i];
200 file = new File(strWalkThroughPath);
201 if (!file.isDirectory()) {
202 file.mkdir();
203 }
204 }
205 }
206
207 boolean b = FTPUtils.downFile(strUrl, iPort, strUserName, strPassWord, strRemotePath, strFileName,
208 PROJECT_PATH + strRelativePath);
209 if (!b) {
210 // download failed
211 logger.info("file download error: " + strLocalPath);
212 return "";
213 } else {
214 logger.info("file saved: " + strLocalPath);
215 }
216 return strLocalPath;
217 }
218
219 public static void main(String[] args) {
220 try {
221 //FileInputStream in = new FileInputStream(new File("D:/adv.txt"));
222 //boolean flag = uploadFile("10.50.127.181", 21, "root", "bestvwin", "/tmp", "adv.txt", in);
223 //boolean flag = downFile("172.25.44.26", 21, "wacos", "wacos", "/opt/wacos/CTMSData/picture/2018/02/07",
224 // "20180207112330_320029.jpg", "");
225 //System.out.println(flag);
226 } catch (Exception e) {
227 e.printStackTrace();
228 }
229 }
230 }
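The instance method downFile(String, String) expects a composite URL of the form ftp://username:password@host:port/remote/dir/file.name, which it decomposes before delegating to the static overload. A usage sketch reusing the host and credentials from the commented-out main above (illustrative values only):

    // Hypothetical usage: fetch a file via a composite FTP URL and save it
    // under PROJECT_PATH + "prepared/". The URL below is illustrative only.
    FTPUtils ftpUtils = new FTPUtils();
    String localPath = ftpUtils.downFile(
            "ftp://wacos:wacos@172.25.44.26:21/opt/wacos/CTMSData/picture/2018/02/07/20180207112330_320029.jpg",
            "prepared/");
    // localPath is "" on failure, otherwise the absolute path of the saved file.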
1 package com.topdraw.sohu.utils;
2
3 import org.slf4j.Logger;
4 import org.slf4j.LoggerFactory;
5
6 import java.io.*;
7 import java.net.HttpURLConnection;
8 import java.net.MalformedURLException;
9 import java.net.URL;
10 import java.net.URLConnection;
11
12 public class HttpConnectionUtil {
13
14 private static final Logger log = LoggerFactory
15 .getLogger(HttpConnectionUtil.class);
16
17 /**
18 * @param strUrlPath download URL
19 * @param strDownloadDir directory to store the download in
20 * @param strFileName name to save the file under (null keeps the original name)
21 * @return the downloaded file, or null on failure
22 */
23 public static File downloadFile(String strUrlPath, String strDownloadDir, String strFileName) {
24 File file = null;
25 try {
26 // build the URL for the resource
27 URL url = new URL(strUrlPath);
28 // URLConnection is the abstract parent of the HTTP connection class
29 URLConnection urlConnection = url.openConnection();
30 // narrow to the HTTP connection class
31 HttpURLConnection httpURLConnection = (HttpURLConnection) urlConnection;
32 // set the request method (GET is the default)
33 httpURLConnection.setRequestMethod("GET");
34 // set the character encoding
35 httpURLConnection.setRequestProperty("Charset", "UTF-8");
36
37 httpURLConnection.setRequestProperty("Accept-Encoding", "identity");
38 httpURLConnection.setReadTimeout(10000);
39 // open the communication link to the resource referenced by this URL (if not already established)
40 httpURLConnection.connect();
41
42 // file size as reported by the server
43 int fileLength = httpURLConnection.getContentLength();
44
45 // resolve the file name; URLs always use '/' as the separator, so avoid File.separatorChar here
46 String filePathUrl = strFileName == null ? httpURLConnection.getURL().getFile() : strFileName;
47 String fileFullName = filePathUrl.substring(filePathUrl.lastIndexOf('/') + 1);
48
49 //log.info("file length -> " + fileLength);
50
53 BufferedInputStream bin = new BufferedInputStream(httpURLConnection.getInputStream());
54
55 String path = strDownloadDir + File.separatorChar + fileFullName;
56 file = new File(path);
57 if (!file.getParentFile().exists()) {
58 file.getParentFile().mkdirs();
59 }
60 OutputStream out = new FileOutputStream(file);
61 int size = 0;
62 int len = 0;
63 byte[] buf = new byte[1024];
64 while ((size = bin.read(buf)) != -1) {
65 len += size;
66 out.write(buf, 0, size);
67 // print download progress
68 // log.info("downloaded -> " + len * 100 / fileLength);
69 }
70 bin.close();
71 out.close();
72
73 if (file.length() != fileLength && fileLength != -1) {
74 log.info("file size mismatch: " + fileLength + " -> " + file.length());
75 file = null;
76 }
77 } catch (MalformedURLException e) {
78 e.printStackTrace();
79 file = null;
80 } catch (IOException e) {
81 e.printStackTrace();
82 file = null;
83 }
84 return file;
85
86 }
87
88 public static void main(String[] args) {
89
90 }
91
92 }
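A usage sketch (the host is illustrative; the m3u8 path matches the playlist list earlier in this commit):

    // Hypothetical usage: download an m3u8 playlist into ./prepared, keeping its name.
    File f = HttpConnectionUtil.downloadFile(
            "http://example.com/prepared/m3u8/20180608/drama/4730_1.m3u8",
            "prepared/m3u8/20180608/drama", null);
    if (f == null) {
        // null signals a failed or size-mismatched download
    }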
1 package com.topdraw.sohu.utils;
2
3 import com.alibaba.fastjson.JSON;
4 import com.alibaba.fastjson.JSONArray;
5 import com.alibaba.fastjson.JSONObject;
6
7 import java.util.ArrayList;
8 import java.util.HashMap;
9 import java.util.List;
10 import java.util.Map;
11
12 public class ImageUtil {
13
14 public static Map<String, Integer> mapImageType = new HashMap<>();
15 static {
16 mapImageType.put("normal", -1);
17 mapImageType.put("thumbnail", 0);
18 mapImageType.put("poster", 1);
19 mapImageType.put("stills", 2);
20 mapImageType.put("icon", 3);
21 mapImageType.put("title", 4);
22 mapImageType.put("ad", 5);
23 mapImageType.put("sketch", 6);
24 mapImageType.put("background", 7);
25 mapImageType.put("channel", 9);
26 mapImageType.put("channel_bw", 10);
27 mapImageType.put("channel_logo", 11);
28 mapImageType.put("channel_name", 12);
29 mapImageType.put("other", 99);
30 }
31
32 public static String convertStrImages2SpecialFormatJSON(String strImages) {
33 JSONArray jsonArr;
34 if (strImages != null) {
35 jsonArr = JSONArray.parseArray(strImages);
36 } else {
37 jsonArr = new JSONArray();
38 }
39 Map<String, Object> mapImages = new HashMap<String, Object>();
40 mapImages.put("map", new HashMap<String, Object>());
41 mapImages.put("list", new ArrayList<Map<String, Object>>());
42
43 for (int i = 0; i < jsonArr.size(); i++) {
44 JSONObject jsonObj = jsonArr.getJSONObject(i);
45 String strKey = ResourceUtil.getImageKeyByType(jsonObj.containsKey("type") ? jsonObj.getIntValue("type") : -1);
46 if (((Map<String, Object>) mapImages.get("map")).containsKey(strKey)) {
47 ((ArrayList<Integer>) ((Map<String, Object>) mapImages.get("map")).get(strKey)).add(i);
48 } else {
49 List<Integer> listIndex = new ArrayList<Integer>();
50 listIndex.add(i);
51 ((Map<String, Object>) mapImages.get("map")).put(strKey, listIndex);
52 }
53 Map<String, Object> mapImageDetail = new HashMap<String, Object>();
54 mapImageDetail.put("id", jsonObj.getLong("id"));
55 mapImageDetail.put("fileUrl", jsonObj.getString("fileUrl"));
56 mapImageDetail.put("height", jsonObj.getInteger("height"));
57 mapImageDetail.put("width", jsonObj.getInteger("width"));
58 mapImageDetail.put("size", jsonObj.getInteger("size"));
59 mapImageDetail.put("extension", jsonObj.getString("extension"));
60 mapImageDetail.put("enable", jsonObj.getBoolean("enable"));
61 mapImageDetail.put("name", jsonObj.getString("name"));
62 mapImageDetail.put("csp_code", jsonObj.getString("csp_code"));
63 ((ArrayList<Map<String, Object>>) mapImages.get("list")).add(mapImageDetail);
64 }
65 return JSON.toJSONString(mapImages);
66 }
67
68
69 }
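For reference, the method regroups a flat JSON array of images into a "map" of type-key to list indexes plus a "list" of normalized image records. A sketch with a minimal, hypothetical input:

    // Hypothetical input: one poster image (type 1 maps to key "poster").
    String in = "[{\"id\":1,\"type\":1,\"fileUrl\":\"http://img/p.jpg\",\"width\":260,\"height\":360}]";
    String out = ImageUtil.convertStrImages2SpecialFormatJSON(in);
    // out is roughly {"map":{"poster":[0]},"list":[{...image fields...}]} —
    // "map" holds indexes into "list", keyed via ResourceUtil.getImageKeyByType.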
1 package com.topdraw.sohu.utils;
2
3 import java.util.ArrayList;
4 import java.util.Collections;
5 import java.util.List;
6
7 public class M3U8 {
8 private String strBasePath;
9 private List<Ts> listTs = new ArrayList<>();
10 private long lStartTime; // start time
11 private long lEndTime; // end time
12 private long lStartDownloadTime; // download start time
13 private long lEndDownloadTime; // download end time
14
15 public String getStrBasePath() {
16 return strBasePath;
17 }
18
19 public void setStrBasePath(String strBasePath) {
20 this.strBasePath = strBasePath;
21 }
22
23 public List<Ts> getListTs() {
24 return listTs;
25 }
26
27 public void setListTs(List<Ts> listTs) {
28 this.listTs = listTs;
29 }
30
31 public void addTs(Ts ts) {
32 this.listTs.add(ts);
33 }
34
35 public long getLStartDownloadTime() {
36 return lStartDownloadTime;
37 }
38
39 public void setLStartDownloadTime(long lStartDownloadTime) {
40 this.lStartDownloadTime = lStartDownloadTime;
41 }
42
43 public long getLEndDownloadTime() {
44 return lEndDownloadTime;
45 }
46
47 public void setLEndDownloadTime(long lEndDownloadTime) {
48 this.lEndDownloadTime = lEndDownloadTime;
49 }
50
51 /**
52 * Get the start time (timestamp of the earliest ts segment)
53 *
54 * @return start time in milliseconds, or 0 if there are no segments
55 */
56 public long getLStartTime() {
57 if (listTs.size() > 0) {
58 Collections.sort(listTs);
59 lStartTime = listTs.get(0).getLongDate();
60 return lStartTime;
61 }
62 return 0;
63 }
64
65 /**
66 * Get the end time (the last segment's timestamp plus its duration)
67 *
68 * @return end time in milliseconds, or 0 if there are no segments
69 */
70 public long getLEndTime() {
71 if (listTs.size() > 0) {
72 Ts tsM3U8 = listTs.get(listTs.size() - 1);
73 lEndTime = tsM3U8.getLongDate() + (long) (tsM3U8.getFSeconds() * 1000);
74 return lEndTime;
75 }
76 return 0;
77 }
78
79 @Override
80 public String toString() {
81 StringBuilder sb = new StringBuilder();
82 sb.append("strBasePath: " + strBasePath);
83 for (Ts ts : listTs) {
84 sb.append("\nts_file_name = " + ts);
85 }
86 sb.append("\n\nlStartTime = " + lStartTime);
87 sb.append("\n\nlEndTime = " + lEndTime);
88 sb.append("\n\nlStartDownloadTime = " + lStartDownloadTime);
89 sb.append("\n\nlEndDownloadTime = " + lEndDownloadTime);
90 return sb.toString();
91 }
92
93 public static class Ts implements Comparable<Ts> {
94 private String strFilePath;
95 private float fSeconds;
96
97 public Ts(String strFilePath, float fSeconds) {
98 this.strFilePath = strFilePath;
99 this.fSeconds = fSeconds;
100 }
101
102 public String getStrFilePath() {
103 return strFilePath;
104 }
105
106 public void setStrFilePath(String strFilePath) {
107 this.strFilePath = strFilePath;
108 }
109
110 public float getFSeconds() {
111 return fSeconds;
112 }
113
114 public void setFSeconds(float fSeconds) {
115 this.fSeconds = fSeconds;
116 }
117
118 @Override
119 public String toString() {
120 return strFilePath + " (" + fSeconds + "sec)";
121 }
122
123 /**
124 * Get the epoch-millisecond timestamp encoded in the ts file name
125 */
126 public long getLongDate() {
127 try {
128 return Long.parseLong(strFilePath.substring(0, strFilePath.lastIndexOf(".")));
129 } catch (Exception e) {
130 return 0;
131 }
132 }
133
134 @Override
135 public int compareTo(Ts o) {
136 return strFilePath.compareTo(o.strFilePath);
137 }
138 }
139 }
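A usage sketch showing how segment file names that encode epoch-millisecond timestamps drive getLStartTime/getLEndTime (values illustrative):

    // Hypothetical playlist of two 10-second segments named by epoch millis.
    M3U8 m3u8 = new M3U8();
    m3u8.setStrBasePath("/prepared/m3u8/20180608/drama/");
    m3u8.addTs(new M3U8.Ts("1528416010000.ts", 10.0f));
    m3u8.addTs(new M3U8.Ts("1528416000000.ts", 10.0f));
    long start = m3u8.getLStartTime(); // 1528416000000 (segments are sorted first)
    long end = m3u8.getLEndTime();     // 1528416010000 + 10000 = 1528416020000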
1 package com.topdraw.sohu.utils;
2
3 import java.io.IOException;
4 import java.io.InputStream;
5 import java.util.Properties;
6
7 public class PropertiesUtil {
8
9 private static final String NAME = "app.properties";
10 private static Properties p;
11
12 static {
13 p = new Properties();
14 InputStream is = PropertiesUtil.class.getClassLoader()
15 .getResourceAsStream(NAME);
16 try {
17 if (is != null) p.load(is); // guard: app.properties may be absent from the classpath
18 } catch (IOException e) {
19 e.printStackTrace();
20 }
21 }
22
23 public static String get(String key) {
24 return p.getProperty(key);
25 }
26
27 }
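Keys resolve against app.properties on the classpath; with the configuration included later in this commit:

    String name = PropertiesUtil.get("project.name");       // "sohu.port"
    String version = PropertiesUtil.get("project.version"); // "v0.0.1"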
1 package com.topdraw.sohu.utils;
2
3 import java.sql.Connection;
4 import java.util.HashMap;
5 import java.util.Map;
6 import java.util.UUID;
7
8 import org.afflatus.utility.DruidUtil;
9
10 public class ResourceUtil {
11 public static void CreateResourceRecycle(String strFileUrl) {
12 Connection writeConnection = null;
13 try {
14 writeConnection = DruidUtil.getRandomWriteConnection();
15 Map<String, Object> map = new HashMap<String, Object>();
16 map.put("path", strFileUrl);
17 map.put("id", UUID.randomUUID().toString());
18 DruidUtil.beginTransaction(writeConnection);
19 DruidUtil.save(writeConnection, map, "cm_resource_recycle");
20 DruidUtil.commitTransaction(writeConnection);
21 } catch(Exception ex) {
22 ex.printStackTrace();
23 } finally {
24 DruidUtil.close(writeConnection);
25 writeConnection = null;
26 }
27 }
28
29 public static String getImageKeyByType(int iType) {
30 String strType = "normal";
31 switch (iType) {
32 case 0:
33 strType = "thumbnail";
34 break;
35 case 1:
36 strType = "poster";
37 break;
38 case 2:
39 strType = "stills";
40 break;
41 case 3:
42 strType = "icon";
43 break;
44 case 4:
45 strType = "title";
46 break;
47 case 5:
48 strType = "ad";
49 break;
50 case 6:
51 strType = "sketch";
52 break;
53 case 7:
54 strType = "background";
55 break;
56 case 9:
57 strType = "channel";
58 break;
59 case 10:
60 strType = "channel_bw";
61 break;
62 case 11:
63 strType = "channel_logo";
64 break;
65 case 12:
66 strType = "channel_name";
67 break;
68 case 99:
69 strType = "other";
70 break;
71 }
72 return strType;
73 }
74 }
1 package com.topdraw.sohu.utils;
2
3 import org.dom4j.Document;
4 import org.dom4j.DocumentHelper;
5 import org.dom4j.Element;
6 import org.dom4j.io.OutputFormat;
7 import org.dom4j.io.XMLWriter;
8 import org.slf4j.Logger;
9 import org.slf4j.LoggerFactory;
10
11 import java.io.File;
12 import java.io.FileOutputStream;
13 import java.text.SimpleDateFormat;
14 import java.util.Date;
15 import java.util.UUID;
16
17 public class XmlUtils {
18 private static final Logger logger = LoggerFactory.getLogger(XmlUtils.class);
19 public static final String FTP_PATH = "/app/proftpd/smp/";
20 public static final String XML_RELATIVE_PATH = "notify/xml/";
21
22 public String generateNotifyXml(int iResult, String strErrorDescription) {
23 String strRet = "";
24 Date date = new Date();
25 SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd");
26 String strDate = sdf.format(date) + "/";
27 String strRelativePath = XML_RELATIVE_PATH + strDate + UUID.randomUUID() + ".xml";
28 String strPath = FTP_PATH + strRelativePath;
29
30 File file = new File(strPath);
31 if (!file.isDirectory()) { // directory does not exist yet
32 String[] aPathSegments = strPath.split("/");
33 String strWalkThroughPath = "/";
34 for (int i = 0; i < aPathSegments.length - 1; i++) {
35 strWalkThroughPath = strWalkThroughPath + "/" + aPathSegments[i];
36 file = new File(strWalkThroughPath);
37 if (!file.isDirectory()) {
38 file.mkdir();
39 }
40 }
41 }
42
43 Document document = DocumentHelper.createDocument();
44 Element el = document.addElement("xsi:ADI");
45 el.addAttribute("xmlns:xsi", "http://www.w3.org/2001/XMLSchema-instance");
46 Element elReply = el.addElement("Reply");
47 Element elPropertyResult = elReply.addElement("Property");
48 Element elPropertyDescription = elReply.addElement("Property");
49 elPropertyResult.addAttribute("Name", "Result");
50 elPropertyDescription.addAttribute("Name", "Description");
51 elPropertyResult.setText(iResult + "");
52 elPropertyDescription.setText(strErrorDescription);
53
54 OutputFormat format = OutputFormat.createPrettyPrint();
55 format.setEncoding("UTF-8");
56 format.setNewLineAfterDeclaration(false);
57 try {
58 XMLWriter writer = new XMLWriter(new FileOutputStream(strPath), format);
59 writer.setEscapeText(false);
60 writer.write(document);
61 writer.close();
62 } catch (Exception ex) {
63 ex.printStackTrace();
64 }
65 return PropertiesUtil.get("project.ftp_base_url") + strRelativePath;
66
67 }
68
69 public static void main(String[] args) {
70
71 }
72 }
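For a call like generateNotifyXml(0, "success"), the writer emits an ADI reply of roughly this shape (pretty-printed, UTF-8), and the method returns PropertiesUtil.get("project.ftp_base_url") plus the relative path notify/xml/<yyyy-MM-dd>/<uuid>.xml:

    <?xml version="1.0" encoding="UTF-8"?>
    <xsi:ADI xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
      <Reply>
        <Property Name="Result">0</Property>
        <Property Name="Description">success</Property>
      </Reply>
    </xsi:ADI>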
1 Manifest-Version: 1.0
2 Main-Class: com.topdraw.sohu.utils.SohuUtil
3
1 #project
2 project.name = sohu.port
3 project.version = v0.0.1
4
5 #### DB Configuration #################
6 db.read_datasource_count=1
7 db.write_datasource_count=1
8
9 #### Global Configuration #############
10
11 Global.CrossDomainAllowOrigin=*
12 Global.CircumstanceLevel=1
1 name=druid_read_01
2 driverClassName = com.mysql.jdbc.Driver
3
4 url = jdbc:mysql://139.196.188.223:3306/hyperion?useUnicode=true&characterEncoding=UTF-8&useOldAliasMetadataBehavior=true&useSSL=false&autoReconnect=true
5 username = druid_read
6 password = zyRead!
7
8 initialSize = 5
9 maxActive = 20
10 minIdle = 3
11 autoReconnect=true
12
13 # Maximum wait time, in milliseconds, when acquiring a connection. Once maxWait is set,
14 # a fair lock is used by default, which reduces concurrency somewhat;
15 # set useUnfairLock=true if an unfair lock is needed.
16 maxWait = 60000
17
18 removeAbandoned = true
19 removeAbandonedTimeout = 3600
20
21 # Two meanings:
22 # 1) interval at which the Destroy thread checks connections
23 # 2) basis for the testWhileIdle decision; see the testWhileIdle notes
24 timeBetweenEvictionRunsMillis = 60000
25 minEvictableIdleTimeMillis = 300000
26 validationQuery = SELECT 1 FROM DUAL
27 # test connections while idle
28 # if validationQuery is null, testOnBorrow, testOnReturn and testWhileIdle have no effect
29 testWhileIdle = true
30 testOnBorrow = false
31 testOnReturn = false
32 poolPreparedStatements = false
33 maxPoolPreparedStatementPerConnectionSize = 50
34
35 # String property that enables extension plugins by alias.
36 # Common filters: stat (monitoring/statistics), log4j (logging), wall (SQL-injection defense)
37 filters = stat
38
39 # To enable PSCache this must be greater than 0; when it is,
40 # poolPreparedStatements is automatically switched to true.
41 maxOpenPreparedStatements = -1
42
43
44
45
46 # SQL executed when a physical connection is initialized
47 #connectionInitSqls=
48
49 # auto-detected from dbType; drops the connection when the database throws unrecoverable exceptions
50 #exceptionSorter=
51
1 name=druid_write_01
2 driverClassName=com.mysql.jdbc.Driver
3
4 url = jdbc:mysql://139.196.188.223:3306/hyperion?useUnicode=true&characterEncoding=UTF-8&useOldAliasMetadataBehavior=true&useSSL=false&autoReconnect=true
5 username = druid_write
6 password = zyWrite!
7
8 initialSize=5
9 maxActive=20
10 minIdle=3
11 autoReconnect=true
12
13 # Maximum wait time, in milliseconds, when acquiring a connection. Once maxWait is set,
14 # a fair lock is used by default, which reduces concurrency somewhat;
15 # set useUnfairLock=true if an unfair lock is needed.
16 maxWait=60000
17
18 removeAbandoned=true
19 removeAbandonedTimeout=3600
20
21 # Two meanings:
22 # 1) interval at which the Destroy thread checks connections
23 # 2) basis for the testWhileIdle decision; see the testWhileIdle notes
24 timeBetweenEvictionRunsMillis=60000
25 minEvictableIdleTimeMillis=300000
26 validationQuery=SELECT 1 FROM DUAL
27 # test connections while idle
28 # if validationQuery is null, testOnBorrow, testOnReturn and testWhileIdle have no effect
29 testWhileIdle = true
30 testOnBorrow=false
31 testOnReturn=false
32 poolPreparedStatements=false
33 maxPoolPreparedStatementPerConnectionSize=50
34
35 # String property that enables extension plugins by alias.
36 # Common filters: stat (monitoring/statistics), log4j (logging), wall (SQL-injection defense)
37 filters=stat
38
39 # To enable PSCache this must be greater than 0; when it is,
40 # poolPreparedStatements is automatically switched to true.
41 maxOpenPreparedStatements = -1
42
43
44
45
46 # SQL executed when a physical connection is initialized
47 #connectionInitSqls=
48
49 # auto-detected from dbType; drops the connection when the database throws unrecoverable exceptions
50 #exceptionSorter=
51
1 <?xml version="1.0" encoding="UTF-8"?>
2 <!DOCTYPE log4j:configuration SYSTEM "log4j.dtd">
3
4
5 <!-- ========================== custom conversion pattern notes ============================== -->
6 <!-- %p priority of the event: DEBUG, INFO, WARN, ERROR, FATAL -->
7 <!-- %r milliseconds elapsed since the application started -->
8 <!-- %c category of the event, usually the fully qualified class name -->
9 <!-- %t name of the thread that generated the event -->
10 <!-- %n platform line separator: "\r\n" on Windows, "\n" on Unix -->
11 <!-- %d date/time of the event, ISO8601 by default; a format may follow, e.g. %d{yyyy MMM dd HH:mm:ss,SSS},
12 which prints something like 2002-10-18 22:10:28,921 -->
13 <!-- %l location of the event: category, thread and source line, e.g. Testlog4.main(TestLog4.java:10) -->
14 <!-- ========================================================================== -->
15 <!-- ========================== appender notes ================================ -->
16 <!-- Appenders provided by Log4j: -->
17 <!-- org.apache.log4j.ConsoleAppender (console), -->
18 <!-- org.apache.log4j.FileAppender (file), -->
19 <!-- org.apache.log4j.DailyRollingFileAppender (one log file per day), -->
20 <!-- org.apache.log4j.RollingFileAppender (rolls over when the file reaches a size limit), -->
21 <!-- org.apache.log4j.WriterAppender (streams log events to an arbitrary destination) -->
22 <!-- ========================================================================== -->
23
24
25 <log4j:configuration xmlns:log4j='http://jakarta.apache.org/log4j/'>
26 <appender name="cAppender" class="org.apache.log4j.ConsoleAppender">
27 <layout class="org.apache.log4j.PatternLayout">
28 <param name="ConversionPattern" value="[%d{dd HH:mm:ss,SSS} %-5p] [%t] %c{1}:%m%n" />
29 </layout>
30 <!-- filter restricting the output level range -->
31 <filter class="org.apache.log4j.varia.LevelRangeFilter">
32 <param name="levelMin" value="debug" />
33 <param name="levelMax" value="error" />
34 <param name="AcceptOnMatch" value="true" />
35 </filter>
36 </appender>
37
38 <!-- <appender name="rfAppender" class="org.apache.log4j.RollingFileAppender">
39 <param name="File" value="./log/sohu.port.log" /> <param name="Append"
40 value="true" /> <param name="MaxBackupIndex" value="10" /> <layout class="org.apache.log4j.PatternLayout">
41 <param name="ConversionPattern" value="%p (%c:%L)- %m%n" /> </layout> </appender> -->
42
43 <appender name="drfInfoAppender" class="org.apache.log4j.DailyRollingFileAppender">
44 <param name="File" value="${catalina.base}/logs/sohu.port.info" />
45 <param name="DatePattern" value="'.'yyyy-MM-dd'.log'" />
46 <layout class="org.apache.log4j.PatternLayout">
47 <param name="ConversionPattern"
48 value="[%d{yyyy-MM-dd HH:mm:ss.SSS} %-5p] [%t] %c{1}:%m%n" />
49 </layout>
50 <filter class="org.apache.log4j.varia.LevelRangeFilter">
51 <param name="levelMin" value="info" />
52 <param name="levelMax" value="error" />
53 <param name="AcceptOnMatch" value="true" />
54 </filter>
55 </appender>
56
57 <appender name="drfErrorAppender" class="org.apache.log4j.DailyRollingFileAppender">
58 <param name="File" value="${catalina.base}/logs/sohu.port.error" />
59 <param name="DatePattern" value="'.'yyyy-MM-dd'.log'" />
60 <layout class="org.apache.log4j.PatternLayout">
61 <param name="ConversionPattern"
62 value="[%d{yyyy-MM-dd HH:mm:ss.SSS} %-5p] [%t] %c{1}:%m%n" />
63 </layout>
64 <filter class="org.apache.log4j.varia.LevelRangeFilter">
65 <param name="levelMin" value="warning" />
66 <param name="levelMax" value="error" />
67 <param name="AcceptOnMatch" value="true" />
68 </filter>
69
70 </appender>
71
72
73
74 <!-- per-logger settings; additivity controls whether the default appender inheritance applies
75 <logger name="test.TaskCommandHandler" additivity="false"> <priority value="info"/>
76 <appender-ref ref="cAppender" /> <appender-ref ref="drfAppender" /> </logger> -->
77
78 <!-- root logger settings -->
79 <root>
80 <priority value="info" />
81 <appender-ref ref="cAppender" />
82 <appender-ref ref="drfInfoAppender" />
83 <!--<appender-ref ref="drfErrorAppender"/> -->
84
85 </root>
86 </log4j:configuration>
1 org.quartz.scheduler.instanceName = Sohu Interface Scheduler
2 org.quartz.scheduler.instanceId = 01
3
4 org.quartz.threadPool.class = org.quartz.simpl.SimpleThreadPool
5 org.quartz.threadPool.threadCount = 10
6
7 org.quartz.jobStore.class = org.quartz.simpl.RAMJobStore
8
9 org.quartz.plugin.jobInitializer.class = org.quartz.plugins.xml.XMLSchedulingDataProcessorPlugin
10 org.quartz.plugin.jobInitializer.fileNames = quartz_jobs.xml
11 org.quartz.plugin.jobInitializer.failOnFileNotFound = true
12
13 org.quartz.scheduler.jmx.export = true
1 <?xml version='1.0' encoding='utf-8'?>
2 <job-scheduling-data
3 xmlns="http://www.quartz-scheduler.org/xml/JobSchedulingData"
4 xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
5 xsi:schemaLocation="http://www.quartz-scheduler.org/xml/JobSchedulingData http://www.quartz-scheduler.org/xml/job_scheduling_data_2_0.xsd"
6 version="2.0">
7 <schedule>
8 <job>
9 <name>M3U8Job</name>
10 <group>M3U8JobGroup</group>
11 <description>M3U8Job</description>
12 <job-class>com.topdraw.sohu.port.job.M3U8Job</job-class>
13 </job>
14 <trigger>
15 <simple>
16 <name>M3U8JobTrigger</name>
17 <group>M3U8JobGroup</group>
18 <job-name>M3U8Job</job-name>
19 <job-group>M3U8JobGroup</job-group>
20 <start-time-seconds-in-future>5</start-time-seconds-in-future>
21 <repeat-count>-1</repeat-count>
22 <repeat-interval>30000</repeat-interval>
23 </simple>
24 </trigger>
25
26 <job>
27 <name>MetaDataJob</name>
28 <group>MetaDataJobGroup</group>
29 <description>MetaDataJob</description>
30 <job-class>com.topdraw.sohu.port.job.MetaDataJob</job-class>
31 </job>
32 <trigger>
33 <cron>
34 <name>MetaDataJobTrigger</name>
35 <group>MetaDataJobGroup</group>
36 <job-name>MetaDataJob</job-name>
37 <job-group>MetaDataJobGroup</job-group>
38 <cron-expression>0 55 * * * ?</cron-expression>
39 </cron>
40 </trigger>
41 </schedule>
42 </job-scheduling-data>
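For reference: the simple trigger starts M3U8Job 5 seconds after boot and repeats every 30000 ms indefinitely (repeat-count -1), while 0 55 * * * ? fires MetaDataJob at minute 55 of every hour (Quartz cron fields are second minute hour day-of-month month day-of-week). A quick check with Quartz's own parser, as a sketch:

    // Sketch: print the next few MetaDataJob fire times using Quartz's CronExpression.
    import org.quartz.CronExpression;
    import java.text.ParseException;
    import java.util.Date;

    public class CronCheck {
        public static void main(String[] args) throws ParseException {
            CronExpression cron = new CronExpression("0 55 * * * ?");
            Date t = new Date();
            for (int i = 0; i < 3; i++) {
                t = cron.getNextValidTimeAfter(t); // e.g. :55:00 of each successive hour
                System.out.println(t);
            }
        }
    }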
1 <!DOCTYPE web-app PUBLIC
2 "-//Sun Microsystems, Inc.//DTD Web Application 2.3//EN"
3 "http://java.sun.com/dtd/web-app_2_3.dtd" >
4
5 <web-app>
6 <display-name>Archetype Created Web Application</display-name>
7 </web-app>
1 <html>
2 <body>
3 <h2>Hello World!</h2>
4 </body>
5 </html>
1 Manifest-Version: 1.0
2 Built-By: cheng
3 Created-By: IntelliJ IDEA
4 Build-Jdk: 1.8.0_121
5