initialize (0 parents)
Showing 97 changed files with 3,136 additions and 0 deletions.
.gitignore 0 → 100644
225.txt 0 → 100644
/prepared/m3u8/20180608/drama/4730_1.m3u8
/prepared/m3u8/20180608/drama/4946_1.m3u8
/prepared/m3u8/20180608/drama/4734_1.m3u8
/prepared/m3u8/20180608/drama/4948_1.m3u8
/prepared/m3u8/20180608/drama/4760_1.m3u8
/prepared/m3u8/20180608/drama/4772_1.m3u8
/prepared/m3u8/20180608/drama/4806_1.m3u8
/prepared/m3u8/20180608/drama/4976_1.m3u8
out/artifacts/port_jar/port.jar 0 → 100644
No preview for this file type (binary).
pom.xml 0 → 100644
<?xml version="1.0" encoding="UTF-8"?>

<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>

  <groupId>com.topdraw.sohu</groupId>
  <artifactId>port</artifactId>
  <version>1.0-SNAPSHOT</version>
  <packaging>war</packaging>

  <name>port Maven Webapp</name>

  <dependencies>
    <dependency>
      <groupId>org.afflatus</groupId>
      <artifactId>utility</artifactId>
      <version>1.0.3</version>
    </dependency>
    <dependency>
      <groupId>com.alibaba</groupId>
      <artifactId>druid</artifactId>
      <version>1.1.6</version>
    </dependency>
    <dependency>
      <groupId>log4j</groupId>
      <artifactId>log4j</artifactId>
      <version>1.2.17</version>
    </dependency>
    <dependency>
      <groupId>org.slf4j</groupId>
      <artifactId>slf4j-log4j12</artifactId>
      <version>1.7.21</version>
    </dependency>
    <dependency>
      <groupId>com.alibaba</groupId>
      <artifactId>fastjson</artifactId>
      <version>1.2.20</version>
    </dependency>
    <dependency>
      <groupId>mysql</groupId>
      <artifactId>mysql-connector-java</artifactId>
      <version>5.1.26</version>
    </dependency>
    <dependency>
      <groupId>com.thoughtworks.xstream</groupId>
      <artifactId>xstream</artifactId>
      <version>1.4.7</version>
    </dependency>
    <dependency>
      <groupId>org.quartz-scheduler</groupId>
      <artifactId>quartz</artifactId>
      <version>2.2.3</version>
    </dependency>
    <dependency>
      <groupId>org.quartz-scheduler</groupId>
      <artifactId>quartz-jobs</artifactId>
      <version>2.2.3</version>
    </dependency>
    <dependency>
      <groupId>javax.transaction</groupId>
      <artifactId>jta</artifactId>
      <version>1.1</version>
    </dependency>
    <dependency>
      <groupId>commons-net</groupId>
      <artifactId>commons-net</artifactId>
      <version>3.3</version>
    </dependency>
    <dependency>
      <groupId>org.dom4j</groupId>
      <artifactId>dom4j</artifactId>
      <version>2.0.0</version>
    </dependency>
  </dependencies>

  <build>
    <finalName>port</finalName>

    <plugins>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-compiler-plugin</artifactId>
        <version>3.1</version>
        <configuration>
          <source>1.8</source>
          <target>1.8</target>
        </configuration>
      </plugin>

      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-dependency-plugin</artifactId>
        <executions>
          <execution>
            <id>copy</id>
            <phase>package</phase>
            <goals>
              <goal>copy-dependencies</goal>
            </goals>
            <configuration>
              <outputDirectory>./target/lib</outputDirectory>
            </configuration>
          </execution>
        </executions>
      </plugin>
    </plugins>

    <pluginManagement><!-- lock down plugin versions to avoid using Maven defaults (may be moved to parent pom) -->
      <plugins>
        <plugin>
          <artifactId>maven-clean-plugin</artifactId>
          <version>3.0.0</version>
        </plugin>
        <!-- see http://maven.apache.org/ref/current/maven-core/default-bindings.html#Plugin_bindings_for_war_packaging -->
        <plugin>
          <artifactId>maven-resources-plugin</artifactId>
          <version>3.0.2</version>
        </plugin>
        <plugin>
          <artifactId>maven-compiler-plugin</artifactId>
          <version>3.7.0</version>
        </plugin>
        <plugin>
          <artifactId>maven-surefire-plugin</artifactId>
          <version>2.20.1</version>
        </plugin>
        <plugin>
          <artifactId>maven-war-plugin</artifactId>
          <version>3.2.0</version>
        </plugin>
        <plugin>
          <artifactId>maven-install-plugin</artifactId>
          <version>2.5.2</version>
        </plugin>
        <plugin>
          <artifactId>maven-deploy-plugin</artifactId>
          <version>2.8.2</version>
        </plugin>
      </plugins>
    </pluginManagement>
  </build>
</project>
package com.topdraw.sohu.listener;

import com.mysql.jdbc.AbandonedConnectionCleanupThread;
import com.topdraw.sohu.utils.PropertiesUtil;
import org.afflatus.utility.DruidUtil;
import org.afflatus.utility.MemcachedClient;
import org.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.quartz.SchedulerFactory;
import org.quartz.impl.StdSchedulerFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;
import javax.servlet.annotation.WebListener;
import java.sql.Connection;
import java.sql.Driver;
import java.sql.DriverManager;
import java.util.Enumeration;

@WebListener
public class ServerListener implements ServletContextListener {
    private static Logger logger;
    private static final String CATALINA_BASE = "catalina.base";
    private static final String JETTY_HOME = "jetty.home";
    private static final String PROJECT_NAME = "project.name";
    private static final String PROJECT_VERSION = "project.version";

    public static Scheduler scheduler = null;

    public static Boolean bStarted = true;

    @Override
    public void contextDestroyed(ServletContextEvent event) {
        logger.debug("contextDestroyed");
        if (null != scheduler) {
            try {
                // scheduler.pauseAll();
                bStarted = false;
                scheduler.shutdown(true);
            } catch (SchedulerException e) {
                logger.error("scheduler shutdown error", e);
            }
        }
        // try {
        //     Thread.sleep(2000);
        // } catch (InterruptedException e) {
        //     e.printStackTrace();
        // }
        try {
            // Deregister JDBC drivers and release pooled resources so the
            // webapp class loader can be collected on undeploy.
            Enumeration<Driver> drivers = DriverManager.getDrivers();
            while (drivers.hasMoreElements()) {
                Driver driver = drivers.nextElement();
                DriverManager.deregisterDriver(driver);
            }
            AbandonedConnectionCleanupThread.shutdown();
            DruidUtil.closeAllDataSource();
            MemcachedClient.shutdown();
        } catch (Exception ex) {
            ex.printStackTrace();
        }
    }

    @Override
    public void contextInitialized(ServletContextEvent event) {
        if (null == System.getProperty(CATALINA_BASE)) {
            String home = System.getProperty(JETTY_HOME);
            if (null != home) {
                System.setProperty(CATALINA_BASE, home);
            } else {
                System.setProperty(CATALINA_BASE, ".");
            }
        }

        System.setProperty(PROJECT_NAME, PropertiesUtil.get(PROJECT_NAME));
        System.setProperty("druid.registerToSysProperty", "true");

        logger = LoggerFactory.getLogger(ServerListener.class);

        logger.debug("contextInitialized [name:"
                + PropertiesUtil.get(PROJECT_NAME) + ", version:"
                + PropertiesUtil.get(PROJECT_VERSION) + "]");

        // Warm up the read/write connection pools once at startup.
        Connection readConnection = null;
        Connection writeConnection = null;
        try {
            readConnection = DruidUtil.getRandomReadConnection();
            writeConnection = DruidUtil.getRandomWriteConnection();
        } catch (Exception ex) {
            logger.error("Connection init error", ex);
        } finally {
            if (null != readConnection) {
                DruidUtil.close(readConnection);
            }
            if (null != writeConnection) {
                DruidUtil.close(writeConnection);
            }
        }

        try {
            SchedulerFactory sf = new StdSchedulerFactory("quartz.properties");
            scheduler = sf.getScheduler();
            scheduler.start();
        } catch (Exception e) {
            logger.error("start quartz error", e);
        }
    }
}
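The listener bootstraps Quartz from a quartz.properties file on the classpath, which this diff excerpt does not show. A minimal sketch of what such a file might contain, using standard Quartz 2.x property names (all values here are assumptions, not the project's actual configuration):

# quartz.properties (hypothetical values; the real file is not shown above)
org.quartz.scheduler.instanceName = PortScheduler
org.quartz.threadPool.class = org.quartz.simpl.SimpleThreadPool
org.quartz.threadPool.threadCount = 3
org.quartz.jobStore.class = org.quartz.simpl.RAMJobStore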
package com.topdraw.sohu.port.job;

import com.topdraw.sohu.listener.ServerListener;
import com.topdraw.sohu.utils.SohuUtil;
import org.afflatus.utility.DruidUtil;
import org.quartz.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.sql.Connection;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;

@DisallowConcurrentExecution
public class M3U8Job implements Job {
    private static final Logger log = LoggerFactory.getLogger(M3U8Job.class);

    private List<Map<String, Object>> listCmd = new ArrayList<>();

    public void execute(JobExecutionContext context) throws JobExecutionException {
        Connection writeConnection = null;

        try {
            writeConnection = DruidUtil.getRandomWriteConnection();
            log.info("M3U8Job start");
            listCmd = getTodoCmd();

            if (listCmd.size() == 0) {
                log.info("no cmd to download m3u8");
                return;
            } else {
                // Pause the trigger while this run drains the queue.
                if (null != ServerListener.scheduler) {
                    ServerListener.scheduler.pauseJob(context.getJobDetail().getKey());
                }
            }
            while (listCmd.size() != 0) {
                if (!ServerListener.bStarted) {
                    // Tomcat is shutting down; abort the running job.
                    log.info("tomcat is shutting down, the executing job breaks");
                    break;
                }
                for (Map<String, Object> mapCmd : listCmd) {
                    SohuUtil util = new SohuUtil();
                    util.downloadM3U8ByVideoId((Long) mapCmd.get("video_id"));
                }
                listCmd = getTodoCmd();
            }
            if (null != ServerListener.scheduler) {
                ServerListener.scheduler.resumeJob(context.getJobDetail().getKey());
            }
        } catch (Exception e) {
            log.error("M3U8Job error", e);
            if (null != ServerListener.scheduler) {
                try {
                    ServerListener.scheduler.resumeJob(context.getJobDetail().getKey());
                } catch (SchedulerException se) {
                    se.printStackTrace();
                }
            }
        } finally {
            DruidUtil.close(writeConnection);
        }
    }

    private static List<Map<String, Object>> getTodoCmd() {
        log.info("start get cmd to do...");
        Connection readConnection = null;
        List<Map<String, Object>> listRet = new ArrayList<>();
        try {
            readConnection = DruidUtil.getRandomReadConnection();
            // Fetch up to 20 videos that have a definition but no m3u8 yet.
            String sql = "SELECT * FROM sohu_video WHERE" +
                    //" tvPlayType <> 1 AND" +
                    " m3u8_path IS NULL" +
                    " AND definition IS NOT NULL LIMIT 0, 20";
            listRet = DruidUtil.queryList(readConnection, sql);
        } catch (Exception ex) {
            ex.printStackTrace();
        } finally {
            DruidUtil.close(readConnection);
        }
        return listRet;
    }
}
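Neither job class registers its own trigger; presumably the schedule lives in the Quartz configuration, which this excerpt does not include. A hedged sketch of how M3U8Job could be wired up programmatically with the Quartz 2.x builder API (job key, group, and the 5-minute interval are assumptions):

// Hypothetical wiring sketch; the actual trigger definition is not shown in this commit.
import org.quartz.*;

public class M3U8JobWiring {
    public static void schedule(Scheduler scheduler) throws SchedulerException {
        JobDetail job = JobBuilder.newJob(M3U8Job.class)
                .withIdentity("m3u8Job", "port")
                .build();
        Trigger trigger = TriggerBuilder.newTrigger()
                .withIdentity("m3u8Trigger", "port")
                .withSchedule(SimpleScheduleBuilder.repeatMinutelyForever(5)) // assumed interval
                .build();
        scheduler.scheduleJob(job, trigger);
    }
}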
package com.topdraw.sohu.port.job;

import com.topdraw.sohu.listener.ServerListener;
import com.topdraw.sohu.utils.SohuUtil;
import org.quartz.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@DisallowConcurrentExecution
public class MetaDataJob implements Job {
    private static final Logger log = LoggerFactory.getLogger(MetaDataJob.class);

    public void execute(JobExecutionContext context) throws JobExecutionException {

        try {
            log.info("MetaDataJob start");
            ServerListener.scheduler.pauseJob(context.getJobDetail().getKey());

            SohuUtil util = new SohuUtil();
            util.metaDataIncrementInjection();

            //for (int i = 9; i <= 9; i++) {
            //    util.metaDataIncrementInjectionByXml("2018070" + i);
            //}

            if (null != ServerListener.scheduler) {
                ServerListener.scheduler.resumeJob(context.getJobDetail().getKey());
            }
        } catch (Exception e) {
            log.error("MetaDataJob error", e);
            if (null != ServerListener.scheduler) {
                try {
                    ServerListener.scheduler.resumeJob(context.getJobDetail().getKey());
                } catch (SchedulerException se) {
                    se.printStackTrace();
                }
            }
        }
    }
}
package com.topdraw.sohu.utils;

import java.sql.Connection;
import java.util.HashMap;
import java.util.Map;

import org.afflatus.utility.DruidUtil;

import com.alibaba.fastjson.JSON;

public class BIInterfaceUtil {
    private static final String BI_SWITCH = PropertiesUtil.get("BI_SWITCH");

    public static void createMediaAssetsPublish(String strAction, String strPlatform, String strType, String strName,
            Map<String, Object> mapData, Connection writeConnection) {
        try {
            if (BI_SWITCH.equals("off")) {
                return;
            }
            Map<String, Object> mapSave = new HashMap<String, Object>();
            mapSave.put("action", strAction);
            mapSave.put("platform", strPlatform);
            mapSave.put("type", strType);
            mapSave.put("name", strName);
            if (mapData.containsKey("create_time")) {
                mapData.remove("create_time");
            }
            if (mapData.containsKey("update_time")) {
                mapData.remove("update_time");
            }
            // Keep the serialized payload at or under 4096 characters by
            // dropping the bulky image fields if necessary.
            if (JSON.toJSONString(mapData).length() > 4096) {
                if (mapData.containsKey("image")) {
                    mapData.remove("image");
                }
                if (mapData.containsKey("images")) {
                    mapData.remove("images");
                }
            }
            mapSave.put("data", JSON.toJSONString(mapData));
            DruidUtil.save(writeConnection, mapSave, "x_media_assets_publish");
        } catch (Exception ex) {
            ex.printStackTrace();
        }
    }
}
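A hedged usage sketch of the BI publish hook; the action/platform/type values and the album id are made up for illustration:

// Hypothetical call site; values are illustrative only.
import java.sql.Connection;
import java.util.HashMap;
import java.util.Map;
import org.afflatus.utility.DruidUtil;

public class BIInterfaceUtilDemo {
    public static void main(String[] args) throws Exception {
        Connection conn = DruidUtil.getRandomWriteConnection();
        try {
            Map<String, Object> mapData = new HashMap<>();
            mapData.put("album_id", 4730L);
            BIInterfaceUtil.createMediaAssetsPublish("publish", "sohu", "album", "demo", mapData, conn);
        } finally {
            DruidUtil.close(conn);
        }
    }
}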
package com.topdraw.sohu.utils;

import org.apache.commons.net.ftp.FTPClient;
import org.apache.commons.net.ftp.FTPFile;
import org.apache.commons.net.ftp.FTPReply;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.*;

public class FTPUtils {
    private static final Logger logger = LoggerFactory
            .getLogger(FTPUtils.class);

    private final String PROJECT_PATH = this.getClass().getClassLoader().getResource("../../") == null ?
            (System.getProperty("user.dir").endsWith("/") ? System.getProperty("user.dir")
                    : System.getProperty("user.dir") + "/")
            : this.getClass().getClassLoader().getResource("../../").getPath();

    /**
     * Description: upload a file to an FTP server
     * @param url FTP server hostname
     * @param port FTP server port
     * @param username FTP login account
     * @param password FTP login password
     * @param path target directory on the FTP server
     * @param filename name to store the file under on the FTP server
     * @param input input stream to upload
     * @return true on success, false otherwise
     */
    public boolean uploadFile(String url, int port, String username, String password, String path, String filename, InputStream input) {
        boolean success = false;
        FTPClient ftp = new FTPClient();
        try {
            int reply;
            ftp.connect(url, port); // connect to the FTP server
            // With the default port, ftp.connect(url) can be used instead.
            ftp.login(username, password); // log in
            reply = ftp.getReplyCode();
            System.out.println("ReplyCode: " + reply);
            if (!FTPReply.isPositiveCompletion(reply)) {
                ftp.disconnect();
                return success;
            }
            ftp.changeWorkingDirectory(path);
            ftp.storeFile(filename, input);

            input.close();
            ftp.logout();
            success = true;
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            if (ftp.isConnected()) {
                try {
                    ftp.disconnect();
                } catch (IOException ioe) {
                }
            }
        }
        return success;
    }

    /**
     * Description: download a file from an FTP server
     * @param url FTP server hostname
     * @param port FTP server port
     * @param username FTP login account
     * @param password FTP login password
     * @param remotePath relative path on the FTP server
     * @param fileName name of the file to download
     * @param localPath local directory to save the download to
     * @return true on success, false otherwise
     */
    public static boolean downFile(
            String url, int port, String username, String password, String remotePath, String fileName,
            String localPath) {
        boolean success = false;
        FTPClient ftp = new FTPClient();
        try {
            int reply;
            ftp.setConnectTimeout(10 * 1000);
            ftp.connect(url, port);
            // With the default port, ftp.connect(url) can be used instead.
            ftp.login(username, password); // log in
            ftp.setBufferSize(102400);
            ftp.setFileType(FTPClient.BINARY_FILE_TYPE);
            reply = ftp.getReplyCode();
            System.out.println("ReplyCode: " + reply);
            if (!FTPReply.isPositiveCompletion(reply)) {
                logger.info("isPositiveCompletion");
                ftp.disconnect();
                return success;
            }
            logger.info("remotePath: " + remotePath);
            ftp.changeWorkingDirectory(remotePath); // move to the target directory
            ftp.enterLocalPassiveMode();
            FTPFile[] fs = ftp.listFiles();
            for (FTPFile ff : fs) {
                if (ff.getName().equals(fileName)) {
                    File localFile = new File(localPath + "/" + ff.getName());
                    logger.info("file size: " + ff.getSize());

                    OutputStream os = new FileOutputStream(localFile);
                    ftp.retrieveFile(ff.getName(), os);
                    os.close();
                    logger.info("download file size: " + localFile.length());
                    if (ff.getSize() == localFile.length()) {
                        success = true;
                    } else {
                        logger.info("file size mismatch: " + ff.getSize() + " -> " + localFile.length());
                    }
                    break;
                }
            }
            ftp.logout();
        } catch (IOException e) {
            e.printStackTrace();
            success = false;
        } finally {
            if (ftp.isConnected()) {
                try {
                    ftp.disconnect();
                } catch (IOException ioe) {
                    ioe.printStackTrace();
                }
            }
        }
        return success;
    }

    public String downFile(String strFtpPath, String strRelativePath) {
        String strUrl = "", strUserName = "", strPassWord = "", strRemotePath = "", strFileName = "", strLocalPath = "";
        int iPort = 21;
        // Parse the ftp:// URL.
        final String strFtpFlag = "ftp://";
        if (strFtpPath != null && strFtpPath.length() > 0 && strFtpPath.toLowerCase().contains(strFtpFlag)) {
            // First strip the ftp:// scheme.
            final String cutedFtp = strFtpPath.substring(strFtpPath.indexOf(strFtpFlag) + strFtpFlag.length());
            // Take the user:password@host:port part before the first slash.
            String str4 = "";
            if (cutedFtp.indexOf("/") != -1) {
                str4 = cutedFtp.substring(0, cutedFtp.indexOf("/"));
            } else {
                str4 = cutedFtp;
            }
            // Split out username:password.
            String strUsernameAndPwd = str4.substring(0, str4.indexOf("@"));

            // Split out host:port.
            String strIpAndPort = str4.substring(str4.indexOf("@") + 1);

            // Extract host and port.
            if (!"".equals(strIpAndPort)) {
                if (strIpAndPort.indexOf(":") != -1) {
                    strUrl = strIpAndPort.substring(0, strIpAndPort.indexOf(":"));
                    String strPort = strIpAndPort.substring(strIpAndPort.indexOf(":") + 1);
                    if (strPort != null) {
                        iPort = Integer.parseInt(strPort);
                    }
                } else {
                    // No port given; take the host only.
                    strUrl = strIpAndPort;
                }
            }

            // Extract username and password.
            if (!"".equals(strUsernameAndPwd)) {
                strUserName = strUsernameAndPwd.substring(0, strUsernameAndPwd.indexOf(":"));
                strPassWord = strUsernameAndPwd.substring(strUsernameAndPwd.indexOf(":") + 1);
            }

            // Extract the remote directory and file name.
            String strFileNameAndPath = "";
            if (cutedFtp.indexOf("/") != -1) {
                strFileNameAndPath = cutedFtp.substring(cutedFtp.indexOf("/") + 1);
            } else {
                strFileNameAndPath = "";
            }
            if (!"".equals(strIpAndPort)) {
                if (strFileNameAndPath.indexOf("/") != -1) {
                    strRemotePath = strFileNameAndPath.substring(0, strFileNameAndPath.lastIndexOf("/"));
                    strFileName = strFileNameAndPath.substring(strFileNameAndPath.lastIndexOf("/") + 1);
                } else {
                    strFileName = strFileNameAndPath;
                }
            }
        }

        strLocalPath = PROJECT_PATH + strRelativePath + strFileName;
        logger.info("file download target path: " + strLocalPath);
        File file = new File(strLocalPath);
        if (!file.isDirectory()) { // target directory does not exist yet
            // Create each missing segment of the target directory.
            String[] aPathSegments = strLocalPath.split("/");
            String strWalkThroughPath = "/";
            for (int i = 0; i < aPathSegments.length - 1; i++) {
                strWalkThroughPath = strWalkThroughPath + "/" + aPathSegments[i];
                file = new File(strWalkThroughPath);
                if (!file.isDirectory()) {
                    file.mkdir();
                }
            }
        }

        boolean b = FTPUtils.downFile(strUrl, iPort, strUserName, strPassWord, strRemotePath, strFileName,
                PROJECT_PATH + strRelativePath);
        if (!b) {
            // download failed
            logger.info("file download error: " + strLocalPath);
            return "";
        } else {
            logger.info("file saved: " + strLocalPath);
        }
        return strLocalPath;
    }

    public static void main(String[] args) {
        try {
            //FileInputStream in = new FileInputStream(new File("D:/adv.txt"));
            //boolean flag = uploadFile("10.50.127.181", 21, "root", "bestvwin", "/tmp", "adv.txt", in);
            //boolean flag = downFile("172.25.44.26", 21, "wacos", "wacos", "/opt/wacos/CTMSData/picture/2018/02/07",
            //        "20180207112330_320029.jpg", "");
            //System.out.println(flag);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
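The instance downFile overload expects URLs of the form ftp://user:password@host[:port]/dir/file. A hedged usage sketch; the host (a documentation IP), credentials, and paths are made up:

// Hypothetical usage; all values are illustrative only.
public class FTPUtilsDemo {
    public static void main(String[] args) {
        FTPUtils ftpUtils = new FTPUtils();
        String saved = ftpUtils.downFile(
                "ftp://user:secret@192.0.2.10:21/media/20180608/4730_1.ts",
                "ts/20180608/");
        System.out.println(saved.isEmpty() ? "download failed" : "saved to " + saved);
    }
}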
package com.topdraw.sohu.utils;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.*;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;

public class HttpConnectionUtil {

    private static final Logger log = LoggerFactory
            .getLogger(HttpConnectionUtil.class);

    /**
     * @param strUrlPath download URL
     * @param strDownloadDir directory to save the download to
     * @param strFileName name to save the file under (null keeps the original name)
     * @return the downloaded file, or null on failure
     */
    public static File downloadFile(String strUrlPath, String strDownloadDir, String strFileName) {
        File file = null;
        try {
            URL url = new URL(strUrlPath);
            // URLConnection is the abstract parent; cast to the HTTP connection type.
            URLConnection urlConnection = url.openConnection();
            HttpURLConnection httpURLConnection = (HttpURLConnection) urlConnection;
            // GET is the default request method; set it explicitly anyway.
            httpURLConnection.setRequestMethod("GET");
            httpURLConnection.setRequestProperty("Charset", "UTF-8");
            // Ask for an unencoded body so Content-Length matches the bytes on disk.
            httpURLConnection.setRequestProperty("Accept-Encoding", "identity");
            httpURLConnection.setReadTimeout(10000);
            // Open the communication link if it is not established yet.
            httpURLConnection.connect();

            // Expected file size.
            int fileLength = httpURLConnection.getContentLength();

            // Resolve the target file name.
            String filePathUrl = strFileName == null ? httpURLConnection.getURL().getFile() : strFileName;
            String fileFullName = filePathUrl.substring(filePathUrl.lastIndexOf(File.separatorChar) + 1);

            //log.info("file length -> " + fileLength);

            BufferedInputStream bin = new BufferedInputStream(httpURLConnection.getInputStream());

            String path = strDownloadDir + File.separatorChar + fileFullName;
            file = new File(path);
            if (!file.getParentFile().exists()) {
                file.getParentFile().mkdirs();
            }
            OutputStream out = new FileOutputStream(file);
            int size = 0;
            int len = 0;
            byte[] buf = new byte[1024];
            while ((size = bin.read(buf)) != -1) {
                len += size;
                out.write(buf, 0, size);
                // print download progress
                // log.info("downloaded -> " + len * 100 / fileLength);
            }
            bin.close();
            out.close();

            if (file.length() != fileLength && fileLength != -1) {
                log.info("file size mismatch: " + fileLength + " -> " + file.length());
                file = null;
            }
        } catch (MalformedURLException e) {
            e.printStackTrace();
            file = null;
        } catch (IOException e) {
            e.printStackTrace();
            file = null;
        }
        return file;
    }

    public static void main(String[] args) {

    }

}
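A hedged usage sketch; the URL is one of the feed URLs used in SohuUtil below, while the target directory is made up:

// Hypothetical call; the target directory is illustrative only.
import java.io.File;

public class HttpDownloadDemo {
    public static void main(String[] args) {
        File xml = HttpConnectionUtil.downloadFile(
                "http://ott.hd.sohu.com/hd/houyi/film.xml",
                "/prepared/xml/demo", null);
        System.out.println(xml == null ? "download failed or size mismatch" : xml.getPath());
    }
}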
package com.topdraw.sohu.utils;

import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.JSONArray;
import com.alibaba.fastjson.JSONObject;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class ImageUtil {

    public static Map<String, Integer> mapImageType = new HashMap<>();
    static {
        mapImageType.put("normal", -1);
        mapImageType.put("thumbnail", 0);
        mapImageType.put("poster", 1);
        mapImageType.put("stills", 2);
        mapImageType.put("icon", 3);
        mapImageType.put("title", 4);
        mapImageType.put("ad", 5);
        mapImageType.put("sketch", 6);
        mapImageType.put("background", 7);
        mapImageType.put("channel", 9);
        mapImageType.put("channel_bw", 10);
        mapImageType.put("channel_logo", 11);
        mapImageType.put("channel_name", 12);
        mapImageType.put("other", 99);
    }

    /**
     * Converts a JSON array of image records into a JSON object holding a
     * "list" of image details plus a "map" from image-type key (see
     * ResourceUtil.getImageKeyByType) to the list indices of that type.
     */
    @SuppressWarnings("unchecked")
    public static String convertStrImages2SpecialFormatJSON(String strImages) {
        JSONArray jsonArr;
        if (strImages != null) {
            jsonArr = JSONArray.parseArray(strImages);
        } else {
            jsonArr = new JSONArray();
        }
        Map<String, Object> mapImages = new HashMap<String, Object>();
        mapImages.put("map", new HashMap<String, Object>());
        mapImages.put("list", new ArrayList<Map<String, Object>>());

        for (int i = 0; i < jsonArr.size(); i++) {
            JSONObject jsonObj = jsonArr.getJSONObject(i);
            String strKey = ResourceUtil.getImageKeyByType(jsonObj.containsKey("type") ? jsonObj.getIntValue("type") : -1);
            if (((Map<String, Object>) mapImages.get("map")).containsKey(strKey)) {
                ((ArrayList<Integer>) ((Map<String, Object>) mapImages.get("map")).get(strKey)).add(i);
            } else {
                List<Integer> listIndex = new ArrayList<Integer>();
                listIndex.add(i);
                ((Map<String, Object>) mapImages.get("map")).put(strKey, listIndex);
            }
            Map<String, Object> mapImageDetail = new HashMap<String, Object>();
            mapImageDetail.put("id", jsonObj.getLong("id"));
            mapImageDetail.put("fileUrl", jsonObj.getString("fileUrl"));
            mapImageDetail.put("height", jsonObj.getInteger("height"));
            mapImageDetail.put("width", jsonObj.getInteger("width"));
            mapImageDetail.put("size", jsonObj.getInteger("size"));
            mapImageDetail.put("extension", jsonObj.getString("extension"));
            mapImageDetail.put("enable", jsonObj.getBoolean("enable"));
            mapImageDetail.put("name", jsonObj.getString("name"));
            mapImageDetail.put("csp_code", jsonObj.getString("csp_code"));
            ((ArrayList<Map<String, Object>>) mapImages.get("list")).add(mapImageDetail);
        }
        return JSON.toJSONString(mapImages);
    }

}
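A hedged sketch of the transformation; the sample image record is made up:

// Hypothetical input: a single poster image (type 1); field values are illustrative.
public class ImageUtilDemo {
    public static void main(String[] args) {
        String in = "[{\"id\":1,\"type\":1,\"fileUrl\":\"http://img.example.com/p.jpg\",\"width\":800,\"height\":1200}]";
        String out = ImageUtil.convertStrImages2SpecialFormatJSON(in);
        // Prints roughly: {"list":[{"id":1,"fileUrl":"http://img.example.com/p.jpg",...}],"map":{"poster":[0]}}
        System.out.println(out);
    }
}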
package com.topdraw.sohu.utils;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

public class M3U8 {
    private String strBasePath;
    private List<Ts> listTs = new ArrayList<>();
    private long lStartTime;         // start time
    private long lEndTime;           // end time
    private long lStartDownloadTime; // download start time
    private long lEndDownloadTime;   // download end time

    public String getStrBasePath() {
        return strBasePath;
    }

    public void setStrBasePath(String strBasePath) {
        this.strBasePath = strBasePath;
    }

    public List<Ts> getListTs() {
        return listTs;
    }

    public void setListTs(List<Ts> listTs) {
        this.listTs = listTs;
    }

    public void addTs(Ts ts) {
        this.listTs.add(ts);
    }

    public long getLStartDownloadTime() {
        return lStartDownloadTime;
    }

    public void setLStartDownloadTime(long lStartDownloadTime) {
        this.lStartDownloadTime = lStartDownloadTime;
    }

    public long getLEndDownloadTime() {
        return lEndDownloadTime;
    }

    public void setLEndDownloadTime(long lEndDownloadTime) {
        this.lEndDownloadTime = lEndDownloadTime;
    }

    /**
     * Returns the start time: the timestamp of the earliest segment.
     */
    public long getLStartTime() {
        if (listTs.size() > 0) {
            Collections.sort(listTs);
            lStartTime = listTs.get(0).getLongDate();
            return lStartTime;
        }
        return 0;
    }

    /**
     * Returns the end time: the timestamp of the last segment plus that
     * segment's duration.
     */
    public long getLEndTime() {
        if (listTs.size() > 0) {
            Ts tsM3U8 = listTs.get(listTs.size() - 1);
            lEndTime = tsM3U8.getLongDate() + (long) (tsM3U8.getFSeconds() * 1000);
            return lEndTime;
        }
        return 0;
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("strBasePath: " + strBasePath);
        for (Ts ts : listTs) {
            sb.append("\nts_file_name = " + ts);
        }
        sb.append("\n\nlStartTime = " + lStartTime);
        sb.append("\n\nlEndTime = " + lEndTime);
        sb.append("\n\nlStartDownloadTime = " + lStartDownloadTime);
        sb.append("\n\nlEndDownloadTime = " + lEndDownloadTime);
        return sb.toString();
    }

    public static class Ts implements Comparable<Ts> {
        private String strFilePath;
        private float fSeconds;

        public Ts(String strFilePath, float fSeconds) {
            this.strFilePath = strFilePath;
            this.fSeconds = fSeconds;
        }

        public String getStrFilePath() {
            return strFilePath;
        }

        public void setStrFilePath(String strFilePath) {
            this.strFilePath = strFilePath;
        }

        public float getFSeconds() {
            return fSeconds;
        }

        public void setFSeconds(float fSeconds) {
            this.fSeconds = fSeconds;
        }

        @Override
        public String toString() {
            return strFilePath + " (" + fSeconds + "sec)";
        }

        /**
         * Parses the segment timestamp (millis) from the file name, i.e. the
         * digits before the extension; returns 0 if the name is not numeric.
         */
        public long getLongDate() {
            try {
                return Long.parseLong(strFilePath.substring(0, strFilePath.lastIndexOf(".")));
            } catch (Exception e) {
                return 0;
            }
        }

        @Override
        public int compareTo(Ts o) {
            return strFilePath.compareTo(o.strFilePath);
        }
    }
}
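A hedged usage sketch, assuming segment files are named by epoch-millisecond timestamps as getLongDate() expects; the paths and durations are made up:

// Hypothetical segments named by epoch-millis timestamps.
public class M3U8Demo {
    public static void main(String[] args) {
        M3U8 m3u8 = new M3U8();
        m3u8.setStrBasePath("/prepared/ts/20180608/");
        m3u8.addTs(new M3U8.Ts("1528444800000.ts", 10.0f));
        m3u8.addTs(new M3U8.Ts("1528444810000.ts", 8.5f));
        System.out.println(m3u8.getLStartTime()); // 1528444800000
        System.out.println(m3u8.getLEndTime());   // 1528444818500 (last start + 8.5s)
    }
}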
package com.topdraw.sohu.utils;

import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;

public class PropertiesUtil {

    private static final String NAME = "app.properties";
    private static Properties p;

    static {
        // Load app.properties from the classpath once at class-load time.
        p = new Properties();
        InputStream is = PropertiesUtil.class.getClassLoader()
                .getResourceAsStream(NAME);
        try {
            p.load(is);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    public static String get(String key) {
        return p.getProperty(key);
    }

}
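The app.properties file itself is not part of this excerpt; a minimal sketch using only keys referenced elsewhere in this changeset (project.name and project.version in ServerListener, BI_SWITCH in BIInterfaceUtil), with assumed values:

# app.properties (hypothetical values; keys taken from this commit)
project.name=port
project.version=1.0-SNAPSHOT
BI_SWITCH=off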
package com.topdraw.sohu.utils;

import java.sql.Connection;
import java.util.HashMap;
import java.util.Map;
import java.util.UUID;

import org.afflatus.utility.DruidUtil;

public class ResourceUtil {
    public static void CreateResourceRecycle(String strFileUrl) {
        Connection writeConnection = null;
        try {
            writeConnection = DruidUtil.getRandomWriteConnection();
            Map<String, Object> map = new HashMap<String, Object>();
            map.put("path", strFileUrl);
            map.put("id", UUID.randomUUID().toString());
            DruidUtil.beginTransaction(writeConnection);
            DruidUtil.save(writeConnection, map, "cm_resource_recycle");
            DruidUtil.commitTransaction(writeConnection);
        } catch (Exception ex) {
            ex.printStackTrace();
        } finally {
            DruidUtil.close(writeConnection);
            writeConnection = null;
        }
    }

    public static String getImageKeyByType(int iType) {
        String strType = "normal";
        switch (iType) {
            case 0:
                strType = "thumbnail";
                break;
            case 1:
                strType = "poster";
                break;
            case 2:
                strType = "stills";
                break;
            case 3:
                strType = "icon";
                break;
            case 4:
                strType = "title";
                break;
            case 5:
                strType = "ad";
                break;
            case 6:
                strType = "sketch";
                break;
            case 7:
                strType = "background";
                break;
            case 9:
                strType = "channel";
                break;
            case 10:
                strType = "channel_bw";
                break;
            case 11:
                strType = "channel_logo";
                break;
            case 12:
                strType = "channel_name";
                break;
            case 99:
                strType = "other";
                break;
        }
        return strType;
    }
}
1 | package com.topdraw.sohu.utils; | ||
2 | |||
3 | import com.alibaba.fastjson.JSONObject; | ||
4 | import org.afflatus.utility.DruidUtil; | ||
5 | import org.afflatus.utility.WebUtil; | ||
6 | import org.apache.log4j.Logger; | ||
7 | import org.dom4j.Document; | ||
8 | import org.dom4j.Element; | ||
9 | import org.dom4j.io.SAXReader; | ||
10 | |||
11 | import java.io.*; | ||
12 | import java.sql.Connection; | ||
13 | import java.sql.SQLException; | ||
14 | import java.text.SimpleDateFormat; | ||
15 | import java.util.*; | ||
16 | |||
17 | public class SohuUtil { | ||
18 | |||
19 | //private final String PROJECT_PATH = this.getClass().getClassLoader().getResource("../../") == null ? | ||
20 | // (System.getProperty("user.dir").endsWith("/") ? System.getProperty("user.dir") | ||
21 | // : System.getProperty("user.dir") + "/") | ||
22 | // : this.getClass().getClassLoader().getResource("../../").getPath(); | ||
23 | private final String PROJECT_PATH = "/prepared/"; | ||
24 | private final String M3U8_RELATIVE_PATH = "m3u8" + File.separatorChar; | ||
25 | private final String XML_RELATIVE_PATH = "xml" + File.separatorChar; | ||
26 | private final String TS_HLS_RELATIVE_PATH = "ts" + File.separatorChar; | ||
27 | private final String API_KEY = "a1ab29b59ea9f581011f1eb6506c7ada"; | ||
28 | |||
29 | private static final String FILM_XML_URL = "http://ott.hd.sohu.com/hd/houyi/film.xml"; | ||
30 | private static final String DRAMA_XML_URL = "http://ott.hd.sohu.com/hd/houyi/drama.xml"; | ||
31 | private static final String VARIETY_XML_URL = "http://ott.hd.sohu.com/hd/houyi/variety.xml"; | ||
32 | private static final String DOCUMENTARY_XML_URL = "http://ott.hd.sohu.com/hd/houyi/documentary.xml"; | ||
33 | private static final String CARTOON_XML_URL = "http://ott.hd.sohu.com/hd/houyi/cartoon.xml"; | ||
34 | private static final String FEEFILM_XML_URL = "http://ott.hd.sohu.com/hd/houyi/feeFilm.xml"; | ||
35 | |||
36 | private static final String FILM_ALL_XML_URL = "http://ott.hd.sohu.com/hd/all/film_all_"; | ||
37 | private static final String DRAMA_ALL_XML_URL = "http://ott.hd.sohu.com/hd/all/drama_all_"; | ||
38 | private static final String VARIETY_ALL_XML_URL = "http://ott.hd.sohu.com/hd/all/variety_all_"; | ||
39 | private static final String DOCUMENTARY_ALL_XML_URL = "http://ott.hd.sohu.com/hd/all/documentary_all_"; | ||
40 | private static final String CARTOON_ALL_XML_URL = "http://ott.hd.sohu.com/hd/all/cartoon_all_"; | ||
41 | private static final String FEEFILM_ALL_XML_URL = "http://ott.hd.sohu.com/hd/all/fee_film_all_"; | ||
42 | |||
43 | private static final String M3U8_URL = "http://h5.ott.tv.sohu.com/api/partner/getM3u8VideoUrls.json"; | ||
44 | |||
45 | private Logger log = Logger.getLogger(this.getClass().getName()); | ||
46 | |||
47 | |||
48 | private void fullMetaDataInjection(String strUrl) { | ||
49 | Connection readConnection = null; | ||
50 | Connection writeConnection = null; | ||
51 | try { | ||
52 | readConnection = DruidUtil.getRandomReadConnection(); | ||
53 | writeConnection = DruidUtil.getRandomWriteConnection(); | ||
54 | |||
55 | File file = HttpConnectionUtil.downloadFile(strUrl, PROJECT_PATH, null); | ||
56 | if (file != null) { | ||
57 | SAXReader reader = new SAXReader(); | ||
58 | Document document = reader.read(file.getPath()); | ||
59 | Element root = document.getRootElement(); | ||
60 | |||
61 | int category_id = 0; | ||
62 | int cate_code = 0; | ||
63 | Date updated = new Date(); | ||
64 | SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); | ||
65 | |||
66 | for (Iterator<?> it = root.elementIterator(); it.hasNext();) { | ||
67 | DruidUtil.beginTransaction(writeConnection); | ||
68 | |||
69 | Element element = (Element) it.next(); | ||
70 | if ("category_id".equals(element.getName())) { | ||
71 | category_id = Integer.parseInt(element.getText()); | ||
72 | } | ||
73 | if ("cate_code".equals(element.getName())) { | ||
74 | cate_code = Integer.parseInt(element.getText()); | ||
75 | } | ||
76 | if ("updated".equals(element.getName())) { | ||
77 | updated = sdf.parse(element.getText()); | ||
78 | } | ||
79 | if ("album".equals(element.getName())) { | ||
80 | Map<String, Object> mapAlbum = new HashMap<>(); | ||
81 | for (Iterator<?> itAlbum = element.elementIterator(); itAlbum.hasNext();) { | ||
82 | Element elAlbum = (Element) itAlbum.next(); | ||
83 | if (elAlbum.getName().equals("publish_time")) { | ||
84 | mapAlbum.put(elAlbum.getName(), elAlbum.getText().equals("") ? null : sdf.parse( | ||
85 | elAlbum.getText())); | ||
86 | } else if (elAlbum.getName().equals("episode_updated") || | ||
87 | elAlbum.getName().equals("episode_total") || elAlbum.getName().equals("year") || | ||
88 | elAlbum.getName().equals("fee") || elAlbum.getName().equals("is_clip") || | ||
89 | elAlbum.getName().equals("is_show") || elAlbum.getName().equals("is_early")) { | ||
90 | mapAlbum.put(elAlbum.getName(), elAlbum.getText().equals("") ? 0 : Integer.parseInt( | ||
91 | elAlbum.getText() | ||
92 | )); | ||
93 | } else if (!elAlbum.getName().equals("videos")) { | ||
94 | mapAlbum.put(elAlbum.getName().equals("update_time") ? "album_update_time" : | ||
95 | elAlbum.getName(), elAlbum.getText().equals("") ? null : elAlbum.getText()); | ||
96 | } else { | ||
97 | for (Iterator<?> itVideos = elAlbum.elementIterator(); itVideos.hasNext();) { | ||
98 | Element elVideos = (Element) itVideos.next(); | ||
99 | if (elVideos.getName().equals("video")) { | ||
100 | Map<String, Object> mapVideo = new HashMap<>(); | ||
101 | for (Iterator<?> itVideo = elVideos.elementIterator(); itVideo.hasNext();) { | ||
102 | Element elVideo = (Element) itVideo.next(); | ||
103 | if (elVideo.getName().equals("issue_time")) { | ||
104 | mapVideo.put(elVideo.getName(), elVideo.getText().equals("") ? null : | ||
105 | sdf.parse(elVideo.getText())); | ||
106 | } else if (elVideo.getName().equals("play_order") || | ||
107 | elVideo.getName().equals("time_length") || | ||
108 | elVideo.getName().equals("is_show") || | ||
109 | elVideo.getName().equals("fee") || | ||
110 | elVideo.getName().equals("tvPlayType")) { | ||
111 | //log.info(elVideo.getName() + " " + mapAlbum.get("name")); | ||
112 | mapVideo.put(elVideo.getName(), elVideo.getText().equals("") ? 0 : | ||
113 | Integer.parseInt(elVideo.getText())); | ||
114 | } else if (elVideo.getName().equals("start_time") || | ||
115 | elVideo.getName().equals("end_time")) { | ||
116 | |||
117 | } else if (!elVideo.getName().equals("logoInfo")) { | ||
118 | mapVideo.put(elVideo.getName().equals("update_time") ? | ||
119 | "video_update_time" : elVideo.getName(), | ||
120 | elVideo.getText().equals("") ? null : elVideo.getText()); | ||
121 | } else { | ||
122 | for (Iterator<?> itLog = elVideo.elementIterator(); itLog.hasNext();) { | ||
123 | Element elLog = (Element) itLog.next(); | ||
124 | if (elLog.getName().equals("dimension")) { | ||
125 | mapVideo.put(elLog.getName(), elLog.getText()); | ||
126 | } else { | ||
127 | mapVideo.put(elLog.getName(), elLog.getText().equals("") ? | ||
128 | 0 : Integer.parseInt(elLog.getText())); | ||
129 | } | ||
130 | } | ||
131 | } | ||
132 | } | ||
133 | mapVideo.put("album_id", mapAlbum.get("album_id")); | ||
134 | String sql = "SELECT id FROM sohu_video WHERE video_id = ?"; | ||
135 | Map<String, Object> mapExist = DruidUtil.queryUniqueResult(readConnection, | ||
136 | sql, mapVideo.get("video_id")); | ||
137 | if (mapExist == null) { | ||
138 | DruidUtil.save(writeConnection, mapVideo, "sohu_video"); | ||
139 | log.info("---" + mapVideo.toString()); | ||
140 | } else { | ||
141 | log.info("Video: " + mapVideo.get("video_id") + " already exists, just update"); | ||
142 | mapVideo.put("id", mapExist.get("id")); | ||
143 | DruidUtil.update(writeConnection, mapVideo, "sohu_video", "id"); | ||
144 | } | ||
145 | |||
146 | } | ||
147 | } | ||
148 | } | ||
149 | } | ||
150 | mapAlbum.put("category_id", category_id); | ||
151 | mapAlbum.put("cate_code", cate_code); | ||
152 | mapAlbum.put("updated", updated); | ||
153 | String sql = "SELECT id FROM sohu_album WHERE album_id = ?"; | ||
154 | Map<String, Object> mapExist = DruidUtil.queryUniqueResult(readConnection, sql, | ||
155 | mapAlbum.get("album_id")); | ||
156 | if (mapExist == null) { | ||
157 | DruidUtil.save(writeConnection, mapAlbum, "sohu_album"); | ||
158 | log.info(mapAlbum.toString()); | ||
159 | } else { | ||
160 | mapAlbum.put("id", mapExist.get("id")); | ||
161 | DruidUtil.update(writeConnection, mapAlbum, "sohu_album", "id"); | ||
162 | log.info("Album: " + mapAlbum.get("album_id") + " already exists, just update"); | ||
163 | } | ||
164 | |||
165 | } | ||
166 | DruidUtil.commitTransaction(writeConnection); | ||
167 | } | ||
168 | |||
169 | } else { | ||
170 | log.info("Download xml failed"); | ||
171 | } | ||
172 | |||
173 | } catch (Exception ex) { | ||
174 | ex.printStackTrace(); | ||
175 | log.error("failure in fullMetaDataInjection", ex); | ||
176 | } finally { | ||
177 | DruidUtil.close(readConnection); | ||
178 | DruidUtil.close(writeConnection); | ||
179 | } | ||
180 | } | ||
181 | |||
182 | public void metaDataIncrementInjection() { | ||
183 | log.info("------ start injecting film metadata ------"); | ||
184 | metaDataIncrementInjection(FILM_XML_URL); | ||
185 | log.info("------ film metadata updated ------"); | ||
186 | |||
187 | log.info("------ start injecting drama metadata ------"); | ||
188 | metaDataIncrementInjection(DRAMA_XML_URL); | ||
189 | log.info("------ drama metadata updated ------"); | ||
190 | |||
191 | log.info("------ start injecting variety metadata ------"); | ||
192 | metaDataIncrementInjection(VARIETY_XML_URL); | ||
193 | log.info("------ variety metadata updated ------"); | ||
194 | |||
195 | log.info("------ start injecting documentary metadata ------"); | ||
196 | metaDataIncrementInjection(DOCUMENTARY_XML_URL); | ||
197 | log.info("------ documentary metadata updated ------"); | ||
198 | |||
199 | log.info("------ start injecting cartoon metadata ------"); | ||
200 | metaDataIncrementInjection(CARTOON_XML_URL); | ||
201 | log.info("------ cartoon metadata updated ------"); | ||
202 | |||
203 | log.info("------ start injecting feeFilm metadata ------"); | ||
204 | metaDataIncrementInjection(FEEFILM_XML_URL); | ||
205 | log.info("------ feeFilm metadata updated ------"); | ||
206 | |||
207 | log.info("****************** 华丽的分割线 ******************"); | ||
208 | |||
209 | log.info("------ start self_produced documentary update ------"); | ||
210 | fullSelfProducedMetaDataUpdate("http://ott.hd.sohu.com/hd/houyi/sohu_documentary_update.xml"); | ||
211 | log.info("------ documentary metadata updated ------"); | ||
212 | |||
213 | log.info("------ start self_produced drama update ------"); | ||
214 | fullSelfProducedMetaDataUpdate("http://ott.hd.sohu.com/hd/houyi/sohu_drama_update.xml"); | ||
215 | log.info("------ drama metadata updated ------"); | ||
216 | |||
217 | log.info("------ start self_produced film update ------"); | ||
218 | fullSelfProducedMetaDataUpdate("http://ott.hd.sohu.com/hd/houyi/sohu_film_update.xml"); | ||
219 | log.info("------ film metadata updated ------"); | ||
220 | |||
221 | log.info("------ start self_produced variety update ------"); | ||
222 | fullSelfProducedMetaDataUpdate("http://ott.hd.sohu.com/hd/houyi/sohu_variety_update.xml"); | ||
223 | log.info("------ variety metadata updated ------"); | ||
224 | |||
225 | } | ||
226 | |||
227 | // 搜狐增量接口(自制内容标识) | ||
228 | private void fullSelfProducedMetaDataUpdate(String strUrl) { | ||
229 | Connection readConnection = null; | ||
230 | Connection writeConnection = null; | ||
231 | try { | ||
232 | readConnection = DruidUtil.getRandomReadConnection(); | ||
233 | writeConnection = DruidUtil.getRandomWriteConnection(); | ||
234 | |||
235 | Date dNow = new Date(); | ||
236 | SimpleDateFormat sdf_ymdh = new SimpleDateFormat("yyyyMMddHH"); | ||
237 | SimpleDateFormat sdf_ymd = new SimpleDateFormat("yyyyMMdd"); | ||
238 | |||
239 | String strDate = sdf_ymdh.format(dNow); | ||
240 | String strDatePath = sdf_ymd.format(dNow) + File.separatorChar; | ||
241 | |||
242 | String strCategory = strUrl.substring(strUrl.lastIndexOf("/") + 1, strUrl.lastIndexOf(".")); | ||
243 | |||
244 | File file = HttpConnectionUtil.downloadFile(strUrl, PROJECT_PATH + XML_RELATIVE_PATH + strDatePath + | ||
245 | strCategory + File.separatorChar, strCategory + "_" + strDate + ".xml"); | ||
246 | if (file != null) { | ||
247 | SAXReader reader = new SAXReader(); | ||
248 | Document document = reader.read(file.getPath()); | ||
249 | Element root = document.getRootElement(); | ||
250 | |||
251 | long lAlbumId = 0; | ||
252 | for (Iterator<?> it = root.elementIterator(); it.hasNext();) { | ||
253 | DruidUtil.beginTransaction(writeConnection); | ||
254 | Element element = (Element) it.next(); | ||
255 | if ("album".equals(element.getName())) { | ||
256 | int i = 0; | ||
257 | for (Iterator<?> itAlbum = element.elementIterator(); itAlbum.hasNext();) { | ||
258 | Element elAlbum = (Element) itAlbum.next(); | ||
259 | if (elAlbum.getName().equals("album_id")) { | ||
260 | lAlbumId = Long.parseLong(elAlbum.getText()); | ||
261 | break; | ||
262 | } | ||
263 | } | ||
264 | String sql = "SELECT id, self_produced FROM sohu_album WHERE album_id = ?"; | ||
265 | Map<String, Object> mapExist = DruidUtil.queryUniqueResult(readConnection, sql, | ||
266 | lAlbumId); | ||
267 | if (mapExist == null) { | ||
268 | log.info("no such album: " + lAlbumId); | ||
269 | } else { | ||
270 | if ((Integer) mapExist.get("self_produced") == 1) { | ||
271 | log.info("unnecessary to update: " + lAlbumId); | ||
272 | } else { | ||
273 | i++; | ||
274 | mapExist.put("self_produced", 1); | ||
275 | DruidUtil.update(writeConnection, mapExist, "sohu_album", "id"); | ||
276 | log.info("Album: " + lAlbumId + " self_produced updated"); | ||
277 | } | ||
278 | } | ||
279 | if (i == 0) { | ||
280 | log.info("no records updated"); | ||
281 | } else { | ||
282 | log.info(i + " records updated"); | ||
283 | } | ||
284 | } | ||
285 | |||
286 | DruidUtil.commitTransaction(writeConnection); | ||
287 | } | ||
288 | |||
289 | } else { | ||
290 | log.info("Download xml failed"); | ||
291 | } | ||
292 | |||
293 | } catch (Exception ex) { | ||
294 | ex.printStackTrace(); | ||
295 | log.error("failure in fullSelfProducedMetaDataUpdate: " + strUrl, ex); | ||
296 | } finally { | ||
297 | DruidUtil.close(readConnection); | ||
298 | DruidUtil.close(writeConnection); | ||
299 | } | ||
300 | } | ||
301 | |||
302 | private void metaDataIncrementInjection(String strURL) { | ||
303 | Connection readConnection = null; | ||
304 | Connection writeConnection = null; | ||
305 | try { | ||
306 | readConnection = DruidUtil.getRandomReadConnection(); | ||
307 | writeConnection = DruidUtil.getRandomWriteConnection(); | ||
308 | |||
309 | Date dNow = new Date(); | ||
310 | strURL = strURL + "?api_key=" + API_KEY + "&t=" + dNow.getTime(); | ||
311 | SimpleDateFormat sdf_ymdh = new SimpleDateFormat("yyyyMMddHH"); | ||
312 | SimpleDateFormat sdf_ymd = new SimpleDateFormat("yyyyMMdd"); | ||
313 | |||
314 | String strDate = sdf_ymdh.format(dNow); | ||
315 | String strDatePath = sdf_ymd.format(dNow) + File.separatorChar; | ||
316 | |||
317 | String strCategory = strURL.substring(strURL.lastIndexOf("/") + 1, strURL.lastIndexOf(".")); | ||
318 | |||
319 | File file = HttpConnectionUtil.downloadFile(strURL, PROJECT_PATH + XML_RELATIVE_PATH + strDatePath + | ||
320 | strCategory + File.separatorChar, strCategory + "_" + strDate + ".xml"); | ||
321 | |||
322 | if (file != null) { | ||
323 | SAXReader reader = new SAXReader(); | ||
324 | Document document = reader.read(file.getPath()); | ||
325 | Element root = document.getRootElement(); | ||
326 | |||
327 | int category_id = 0; | ||
328 | int cate_code = 0; | ||
329 | Date updated = new Date(); | ||
330 | SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); | ||
331 | |||
332 | DruidUtil.beginTransaction(writeConnection); | ||
333 | for (Iterator<?> it = root.elementIterator(); it.hasNext();) { | ||
334 | Element element = (Element) it.next(); | ||
335 | if ("category_id".equals(element.getName())) { | ||
336 | category_id = Integer.parseInt(element.getText()); | ||
337 | } | ||
338 | if ("cate_code".equals(element.getName())) { | ||
339 | cate_code = Integer.parseInt(element.getText()); | ||
340 | } | ||
341 | if ("updated".equals(element.getName())) { | ||
342 | updated = sdf.parse(element.getText()); | ||
343 | } | ||
344 | if ("album".equals(element.getName())) { | ||
345 | Map<String, Object> mapAlbum = new HashMap<>(); | ||
346 | for (Iterator<?> itAlbum = element.elementIterator(); itAlbum.hasNext();) { | ||
347 | Element elAlbum = (Element) itAlbum.next(); | ||
348 | if (elAlbum.getName().equals("publish_time")) { | ||
349 | mapAlbum.put(elAlbum.getName(), elAlbum.getText().equals("") ? null : sdf.parse( | ||
350 | elAlbum.getText())); | ||
351 | } else if (elAlbum.getName().equals("episode_updated") || | ||
352 | elAlbum.getName().equals("episode_total") || elAlbum.getName().equals("year") || | ||
353 | elAlbum.getName().equals("fee") || elAlbum.getName().equals("is_clip") || | ||
354 | elAlbum.getName().equals("is_show") || elAlbum.getName().equals("is_early")) { | ||
355 | mapAlbum.put(elAlbum.getName(), elAlbum.getText().equals("") ? 0 : Integer.parseInt( | ||
356 | elAlbum.getText() | ||
357 | )); | ||
358 | } else if (!elAlbum.getName().equals("videos")) { | ||
359 | mapAlbum.put(elAlbum.getName().equals("update_time") ? "album_update_time" : | ||
360 | elAlbum.getName(), elAlbum.getText().equals("") ? null : elAlbum.getText()); | ||
361 | } else { | ||
362 | for (Iterator<?> itVideos = elAlbum.elementIterator(); itVideos.hasNext();) { | ||
363 | Element elVideos = (Element) itVideos.next(); | ||
364 | if (elVideos.getName().equals("video")) { | ||
365 | Map<String, Object> mapVideo = new HashMap<>(); | ||
366 | for (Iterator<?> itVideo = elVideos.elementIterator(); itVideo.hasNext();) { | ||
367 | Element elVideo = (Element) itVideo.next(); | ||
368 | if (elVideo.getName().equals("issue_time")) { | ||
369 | mapVideo.put(elVideo.getName(), elVideo.getText().equals("") ? null : | ||
370 | sdf.parse(elVideo.getText())); | ||
371 | } else if (elVideo.getName().equals("play_order") || | ||
372 | elVideo.getName().equals("time_length") || | ||
373 | elVideo.getName().equals("is_show") || | ||
374 | elVideo.getName().equals("fee") || | ||
375 | elVideo.getName().equals("tvPlayType")) { | ||
376 | //log.info(elVideo.getName() + " " + mapAlbum.get("name")); | ||
377 | mapVideo.put(elVideo.getName(), elVideo.getText().equals("") ? 0 : | ||
378 | Integer.parseInt(elVideo.getText())); | ||
379 | } else if (elVideo.getName().equals("start_time") || | ||
380 | elVideo.getName().equals("end_time")) { | ||
381 | |||
382 | } else if (!elVideo.getName().equals("logoInfo")) { | ||
383 | mapVideo.put(elVideo.getName().equals("update_time") ? | ||
384 | "video_update_time" : elVideo.getName(), | ||
385 | elVideo.getText().equals("") ? null : elVideo.getText()); | ||
386 | } else { | ||
387 | for (Iterator<?> itLog = elVideo.elementIterator(); itLog.hasNext();) { | ||
388 | Element elLog = (Element) itLog.next(); | ||
389 | if (elLog.getName().equals("dimension")) { | ||
390 | mapVideo.put(elLog.getName(), elLog.getText()); | ||
391 | } else { | ||
392 | mapVideo.put(elLog.getName(), elLog.getText().equals("") ? | ||
393 | 0 : Integer.parseInt(elLog.getText())); | ||
394 | } | ||
395 | } | ||
396 | } | ||
397 | } | ||
398 | mapVideo.put("album_id", mapAlbum.get("album_id")); | ||
399 | String sql = "SELECT id, is_show, m3u8_path FROM sohu_video WHERE video_id = ?"; | ||
400 | Map<String, Object> mapExist = DruidUtil.queryUniqueResult(readConnection, | ||
401 | sql, mapVideo.get("video_id")); | ||
402 | if (mapExist == null) { | ||
403 | mapVideo.put("xml_path", file.getPath()); | ||
404 | try { | ||
405 | DruidUtil.save(writeConnection, mapVideo, "sohu_video"); | ||
406 | log.info("video saved: " + mapVideo.get("video_id") + " [" + | ||
407 | mapVideo.get("video_name") + "]"); | ||
408 | } catch (SQLException sqle) { | ||
409 | log.error("fail to save video", sqle); | ||
410 | } | ||
411 | } else { | ||
412 | mapVideo.put("id", mapExist.get("id")); | ||
413 | |||
414 | if (mapExist.get("m3u8_path") != null && | ||
415 | mapExist.get("m3u8_path").toString().equals("500")) { | ||
416 | // re-trigger the m3u8 download by clearing m3u8_path | ||
417 | mapVideo.put("m3u8_path", null); | ||
418 | } | ||
419 | |||
420 | mapVideo.put("xml_path", file.getPath()); | ||
421 | DruidUtil.update(writeConnection, mapVideo, "sohu_video", "id"); | ||
422 | log.info("video: " + mapVideo.get("video_id") + | ||
423 | " already exists, just update"); | ||
424 | if (!java.util.Objects.equals(mapVideo.get("is_show"), mapExist.get("is_show"))) { // != on boxed values compares identity, not value | ||
425 | Map<String, Object> mapOnlineInfo = new HashMap<>(); | ||
426 | mapOnlineInfo.put("video_id", mapVideo.get("video_id")); | ||
427 | mapOnlineInfo.put("is_online", mapVideo.get("is_show")); | ||
428 | |||
429 | DruidUtil.save(writeConnection, mapOnlineInfo, "sohu_online_info"); | ||
430 | } | ||
431 | } | ||
432 | } | ||
433 | } | ||
434 | } | ||
435 | } | ||
436 | mapAlbum.put("category_id", category_id); | ||
437 | mapAlbum.put("cate_code", cate_code); | ||
438 | mapAlbum.put("updated", updated); | ||
439 | String sql = "SELECT id, episode_updated FROM sohu_album WHERE album_id = ?"; | ||
440 | Map<String, Object> mapExist = DruidUtil.queryUniqueResult(readConnection, sql, | ||
441 | mapAlbum.get("album_id")); | ||
442 | if (mapExist == null) { | ||
443 | mapAlbum.put("xml_path", file.getPath()); | ||
444 | DruidUtil.save(writeConnection, mapAlbum, "sohu_album"); | ||
445 | log.info("album saved: " + mapAlbum.get("album_id") + " [" + | ||
446 | mapAlbum.get("album_name") + "]"); | ||
447 | } else { | ||
448 | mapAlbum.put("id", mapExist.get("id")); | ||
449 | if ((Integer) mapAlbum.get("episode_updated") < (Integer) mapExist.get("episode_updated")) { // never let episode_updated go backwards | ||
450 | mapAlbum.put("episode_updated", mapExist.get("episode_updated")); | ||
451 | } | ||
452 | |||
453 | mapAlbum.put("xml_path", file.getPath()); | ||
454 | DruidUtil.update(writeConnection, mapAlbum, "sohu_album", "id"); | ||
455 | log.info("Album: " + mapAlbum.get("album_id") + " already exists, just update"); | ||
456 | |||
457 | if (!java.util.Objects.equals(mapAlbum.get("is_show"), mapExist.get("is_show"))) { // same identity-vs-value fix as above | ||
458 | Map<String, Object> mapOnlineInfo = new HashMap<>(); | ||
459 | mapOnlineInfo.put("album_id", mapAlbum.get("album_id")); | ||
460 | mapOnlineInfo.put("is_online", mapAlbum.get("is_show")); | ||
461 | DruidUtil.save(writeConnection, mapOnlineInfo, "sohu_online_info"); | ||
462 | } | ||
463 | } | ||
464 | } | ||
465 | } | ||
466 | DruidUtil.commitTransaction(writeConnection); | ||
467 | } else { | ||
468 | log.info("Download xml failed"); | ||
469 | } | ||
470 | |||
471 | } catch (Exception ex) { | ||
472 | ex.printStackTrace(); | ||
473 | log.error("failure in metaDataIncrementInjection", ex); | ||
474 | } finally { | ||
475 | DruidUtil.close(readConnection); | ||
476 | DruidUtil.close(writeConnection); | ||
477 | } | ||
478 | } | ||
479 | |||
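// For orientation, the feed parsed above has roughly this shape (reconstructed
// from the element names handled in the loop; the root element name and the
// values are not visible in this excerpt):
//
//   <root>
//     <category_id>...</category_id>
//     <cate_code>...</cate_code>
//     <updated>yyyy-MM-dd HH:mm:ss</updated>
//     <album>
//       <album_id>...</album_id> <publish_time/> <episode_updated/> ...
//       <videos>
//         <video>
//           <video_id>...</video_id> <issue_time/> <play_order/> ...
//           <logoInfo> <dimension/> ... </logoInfo>
//         </video>
//       </videos>
//     </album>
//   </root>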
480 | private String getCategoryByVideo(Map<String, Object> map) { | ||
481 | String str = ""; | ||
482 | Connection readConnection = null; | ||
483 | try { | ||
484 | readConnection = DruidUtil.getRandomReadConnection(); | ||
485 | String sql = "SELECT cate_code FROM sohu_album WHERE album_id = ?"; | ||
486 | Map<String, Object> mapAlbum = DruidUtil.queryUniqueResult(readConnection, sql, map.get("album_id")); | ||
487 | int iCateCode = (Integer) mapAlbum.get("cate_code"); | ||
488 | |||
489 | switch (iCateCode) { | ||
490 | case 100: | ||
491 | if ((Integer) map.get("fee") == 0) { | ||
492 | str = "film"; | ||
493 | } else { | ||
494 | str = "feeFilm"; | ||
495 | } | ||
496 | break; | ||
497 | case 101: | ||
498 | str = "drama"; | ||
499 | break; | ||
500 | case 106: | ||
501 | str = "variety"; | ||
502 | break; | ||
503 | case 107: | ||
504 | str = "documentary"; | ||
505 | break; | ||
506 | case 115: | ||
507 | str = "cartoon"; | ||
508 | break; | ||
509 | } | ||
510 | } catch (Exception ex) { | ||
511 | ex.printStackTrace(); | ||
512 | } finally { | ||
513 | DruidUtil.close(readConnection); | ||
514 | } | ||
515 | return str; | ||
516 | } | ||
517 | |||
518 | public boolean downloadM3U8ByVideoId(long lVideoId) { | ||
519 | Connection readConnection = null; | ||
520 | Connection writeConnection = null; | ||
521 | try { | ||
522 | readConnection = DruidUtil.getRandomReadConnection(); | ||
523 | writeConnection = DruidUtil.getRandomWriteConnection(); | ||
524 | |||
525 | String sql = "SELECT id, album_id, video_id, definition, tvPlayType, fee, m3u8_path FROM sohu_video WHERE video_id = ?"; // m3u8_path is needed for the already-downloaded check below | ||
526 | Map<String, Object> mapVideo = DruidUtil.queryUniqueResult(readConnection, sql, lVideoId); | ||
527 | if (mapVideo == null) { log.info("invalid video_id: " + lVideoId); return false; } // guard against an unknown video_id | ||
528 | String strDefinition = mapVideo.get("definition").toString(); | ||
529 | String[] arr = strDefinition.split(","); | ||
530 | |||
531 | String strCategory = getCategoryByVideo(mapVideo) + File.separatorChar; | ||
532 | |||
533 | int iDefinition = 2; // default; the loop below bubbles the largest advertised definition (2 is deliberately not propagated) to the end of the list and takes the last entry | ||
534 | if (!strDefinition.contains(",")) { | ||
535 | iDefinition = Integer.parseInt(strDefinition); | ||
536 | } else { | ||
537 | for (int i = 0; i < arr.length; i++) { | ||
538 | if (i == arr.length - 1) { | ||
539 | iDefinition = Integer.parseInt(arr[arr.length - 1]); | ||
540 | break; | ||
541 | } | ||
542 | if (Integer.parseInt(arr[i]) > Integer.parseInt(arr[i + 1])) { | ||
543 | if (Integer.parseInt(arr[i]) != 2) { | ||
544 | arr[i + 1] = arr[i]; | ||
545 | } | ||
546 | } | ||
547 | } | ||
548 | } | ||
549 | |||
550 | if (mapVideo.get("m3u8_path") == null) { | ||
551 | String strParam = "albumId=" + mapVideo.get("album_id") + "&versionId=" + iDefinition + | ||
552 | "&tvVerId=" + lVideoId; | ||
553 | |||
554 | String strResponse = WebUtil.sendGet(M3U8_URL, strParam, "UTF-8"); | ||
555 | JSONObject jsonObject = JSONObject.parseObject(strResponse); | ||
556 | if (jsonObject.getIntValue("status") != 200) { | ||
557 | log.info("fail to get m3u8 info from sohu by video_id: " + lVideoId); | ||
558 | mapVideo.put("m3u8_path", jsonObject.getIntValue("status")); | ||
559 | DruidUtil.update(writeConnection, mapVideo, "sohu_video", "id"); | ||
560 | return false; | ||
561 | } else { | ||
562 | if (jsonObject.get("data") == null) { | ||
563 | log.info("fail to get m3u8 [200 null] info from sohu by video_id: " + lVideoId); | ||
564 | mapVideo.put("m3u8_path", "200 null"); | ||
565 | DruidUtil.update(writeConnection, mapVideo, "sohu_video", "id"); | ||
566 | return false; | ||
567 | } | ||
568 | |||
569 | String str = jsonObject.getString("data"); | ||
570 | ByteArrayInputStream is = new ByteArrayInputStream(str.getBytes()); | ||
571 | |||
572 | SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd"); | ||
573 | String strDate = sdf.format(new Date()) + File.separatorChar; | ||
574 | |||
575 | log.info("start downloading m3u8..."); | ||
576 | |||
577 | String strPath = PROJECT_PATH + M3U8_RELATIVE_PATH + strDate + strCategory + | ||
578 | lVideoId + "_" + iDefinition + ".m3u8"; | ||
579 | File file = new File(strPath); | ||
580 | if (!file.getParentFile().exists()) { | ||
581 | file.getParentFile().mkdirs(); | ||
582 | } | ||
583 | OutputStream os = new FileOutputStream(file); | ||
584 | int iBytesRead; | ||
585 | byte[] buffer = new byte[8192]; | ||
586 | while ((iBytesRead = is.read(buffer, 0, 8192)) != -1) { | ||
587 | os.write(buffer, 0, iBytesRead); | ||
588 | } | ||
589 | os.close(); | ||
590 | is.close(); | ||
591 | |||
592 | log.info("downloaded successfully: " + file.getPath()); | ||
593 | |||
594 | mapVideo.put("m3u8_path", strPath); | ||
595 | DruidUtil.update(writeConnection, mapVideo, "sohu_video", "id"); | ||
596 | } | ||
597 | } else { | ||
598 | log.info("m3u8 already downloaded"); | ||
599 | } | ||
600 | |||
601 | } catch (Exception ex) { | ||
602 | ex.printStackTrace(); | ||
603 | log.error("fail to downloadM3U8ByVideoId: " + lVideoId, ex); | ||
604 | } finally { | ||
605 | DruidUtil.close(readConnection); | ||
606 | DruidUtil.close(writeConnection); | ||
607 | } | ||
608 | return true; | ||
609 | } | ||
610 | |||
611 | /* | ||
612 | private M3U8 getM3U8ByURL(String strM3U8URL) { | ||
613 | try { | ||
614 | File file = new File(strM3U8URL); | ||
615 | |||
616 | BufferedReader reader = new BufferedReader(new InputStreamReader(new FileInputStream(file))); | ||
617 | String strBasePath = strM3U8URL.substring(0, strM3U8URL.lastIndexOf(File.separatorChar) + 1); | ||
618 | M3U8 m3u8Ret = new M3U8(); | ||
619 | m3u8Ret.setStrBasePath(strBasePath); | ||
620 | |||
621 | String strLine; | ||
622 | float fSeconds = 0; | ||
623 | int iIndex; | ||
624 | while ((strLine = reader.readLine()) != null) { | ||
625 | if (strLine.startsWith("#")) { | ||
626 | if (strLine.startsWith("#EXTINF:")) { | ||
627 | strLine = strLine.substring(8); | ||
628 | if ((iIndex = strLine.indexOf(",")) != -1) { | ||
629 | strLine = strLine.substring(0, iIndex); | ||
630 | } | ||
631 | try { | ||
632 | fSeconds = Float.parseFloat(strLine); | ||
633 | } catch (Exception e) { | ||
634 | fSeconds = 0; | ||
635 | } | ||
636 | } | ||
637 | continue; | ||
638 | } | ||
639 | if (strLine.endsWith("m3u8")) { | ||
640 | return getM3U8ByURL(strBasePath + strLine); | ||
641 | } | ||
642 | m3u8Ret.addTs(new M3U8.Ts(strLine, fSeconds)); | ||
643 | fSeconds = 0; | ||
644 | } | ||
645 | reader.close(); | ||
646 | |||
647 | return m3u8Ret; | ||
648 | |||
649 | } catch (IOException e) { | ||
650 | // TODO Auto-generated catch block | ||
651 | e.printStackTrace(); | ||
652 | } | ||
653 | return null; | ||
654 | } | ||
655 | |||
656 | public void downloadTSHLSByVideoId(long lVideoId) { | ||
657 | Connection readConnection = null; | ||
658 | Connection writeConnection = null; | ||
659 | try { | ||
660 | readConnection = DruidUtil.getRandomReadConnection(); | ||
661 | writeConnection = DruidUtil.getRandomWriteConnection(); | ||
662 | |||
663 | String sql = "SELECT id, m3u8_path FROM sohu_video WHERE video_id = ?"; | ||
664 | Map<String, Object> mapVideo = DruidUtil.queryUniqueResult(readConnection, sql, lVideoId); | ||
665 | if (mapVideo == null || mapVideo.get("m3u8_path") == null || mapVideo.get("m3u8_path").toString().equals("")) { | ||
666 | log.info("invalid video_id: " + lVideoId); | ||
667 | return; | ||
668 | } | ||
669 | SohuUtil util = new SohuUtil(); | ||
670 | M3U8 m3u8 = util.getM3U8ByURL(mapVideo.get("m3u8_path").toString()); | ||
671 | String strBasePath = m3u8.getStrBasePath(); | ||
672 | SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd"); | ||
673 | String strDate = sdf.format(new Date()) + File.separatorChar; | ||
674 | m3u8.getListTs().parallelStream().forEachOrdered(ts -> { | ||
675 | //log.info(m3u8.getListTs().indexOf(ts) + ".ts"); | ||
676 | |||
677 | File file = new File(PROJECT_PATH + TS_HLS_RELATIVE_PATH + strDate + lVideoId + File.separatorChar + | ||
678 | m3u8.getListTs().indexOf(ts) + ".ts"); | ||
679 | if (!file.exists()) { // skip segments that were already downloaded | ||
680 | File fileTs = HttpConnectionUtil.downloadFile(ts.getStrFilePath(), | ||
681 | PROJECT_PATH + TS_HLS_RELATIVE_PATH + strDate + lVideoId + File.separatorChar, | ||
682 | m3u8.getListTs().indexOf(ts) + ".ts"); | ||
683 | if (fileTs == null) { | ||
684 | log.info("fail to Download ts: " + ts.getStrFilePath()); | ||
685 | } else { | ||
686 | log.info("ts hls downloaded: " + m3u8.getListTs().indexOf(ts) + ".ts"); | ||
687 | } | ||
688 | } | ||
689 | }); | ||
690 | log.info("Video Downloaded: " + lVideoId); | ||
691 | |||
692 | // mapVideo.put("m3u8_path", strPath); | ||
693 | // DruidUtil.update(writeConnection, mapVideo, "sohu_video", "id"); | ||
694 | |||
695 | } catch (Exception ex) { | ||
696 | ex.printStackTrace(); | ||
697 | log.error("fail to downloadTSHLSByVideoId", ex); | ||
698 | } finally { | ||
699 | DruidUtil.close(readConnection); | ||
700 | DruidUtil.close(writeConnection); | ||
701 | } | ||
702 | } | ||
703 | |||
704 | public void downloadM3U8AndTSHLS(long lVideoId) { | ||
705 | Connection readConnection = null; | ||
706 | Connection writeConnection = null; | ||
707 | try { | ||
708 | readConnection = DruidUtil.getRandomReadConnection(); | ||
709 | writeConnection = DruidUtil.getRandomWriteConnection(); | ||
710 | |||
711 | String sql = "SELECT id, album_id, video_id, definition, m3u8_path FROM sohu_video WHERE video_id = ?"; | ||
712 | Map<String, Object> mapVideo = DruidUtil.queryUniqueResult(readConnection, sql, lVideoId); | ||
713 | if (mapVideo == null) { | ||
714 | log.info("invalid video_id: " + lVideoId); | ||
715 | return; | ||
716 | } | ||
717 | |||
718 | SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd"); | ||
719 | String strDate = sdf.format(new Date()) + File.separatorChar; | ||
720 | |||
721 | String strDefinition = mapVideo.get("definition").toString(); | ||
722 | String[] arr = strDefinition.split(","); | ||
723 | int iDefinition = 2; | ||
724 | if (!strDefinition.contains(",")) { | ||
725 | iDefinition = Integer.parseInt(strDefinition); | ||
726 | } else { | ||
727 | for (int i = 0; i < arr.length; i++) { | ||
728 | if (i == arr.length - 1) { | ||
729 | iDefinition = Integer.parseInt(arr[arr.length - 1]); | ||
730 | break; | ||
731 | } | ||
732 | if (Integer.parseInt(arr[i]) > Integer.parseInt(arr[i + 1])) { | ||
733 | if (Integer.parseInt(arr[i]) != 2) { | ||
734 | arr[i + 1] = arr[i]; | ||
735 | } | ||
736 | } | ||
737 | } | ||
738 | } | ||
739 | |||
740 | if (mapVideo.get("m3u8_path") == null) { | ||
741 | |||
742 | String strParam = "albumId=" + mapVideo.get("album_id") + "&versionId=" + iDefinition + | ||
743 | "&tvVerId=" + lVideoId; | ||
744 | |||
745 | String strResponse = WebUtil.sendGet(M3U8_URL, strParam, "UTF-8"); | ||
746 | JSONObject jsonObject = JSONObject.parseObject(strResponse); | ||
747 | if (jsonObject.getIntValue("status") != 200) { | ||
748 | log.info("fail to get m3u8 info from sohu by video_id: " + lVideoId); | ||
749 | return; | ||
750 | } else { | ||
751 | String str = jsonObject.getString("data"); | ||
752 | ByteArrayInputStream is = new ByteArrayInputStream(str.getBytes()); | ||
753 | |||
754 | log.info("start downloading m3u8..."); | ||
755 | String strPath = PROJECT_PATH + M3U8_RELATIVE_PATH + strDate + lVideoId + "_" + iDefinition + ".m3u8"; | ||
756 | File file = new File(strPath); | ||
757 | if (!file.getParentFile().exists()) { | ||
758 | file.getParentFile().mkdirs(); | ||
759 | } | ||
760 | OutputStream os = new FileOutputStream(file); | ||
761 | int iBytesRead; | ||
762 | byte[] buffer = new byte[8192]; | ||
763 | while ((iBytesRead = is.read(buffer, 0, 8192)) != -1) { | ||
764 | os.write(buffer, 0, iBytesRead); | ||
765 | } | ||
766 | os.close(); | ||
767 | is.close(); | ||
768 | |||
769 | log.info("downloaded successfully: " + file.getPath()); | ||
770 | |||
771 | mapVideo.put("m3u8_path", strPath); | ||
772 | DruidUtil.update(writeConnection, mapVideo, "sohu_video", "id"); | ||
773 | } | ||
774 | } else { | ||
775 | log.info("m3u8 already downloaded, ready to download ts hls..."); | ||
776 | } | ||
777 | |||
778 | |||
779 | SohuUtil util = new SohuUtil(); | ||
780 | M3U8 m3u8 = util.getM3U8ByURL(mapVideo.get("m3u8_path").toString()); | ||
781 | String strBasePath = m3u8.getStrBasePath(); | ||
782 | String strTsBaseUrl = PROJECT_PATH + TS_HLS_RELATIVE_PATH + strDate + lVideoId + "_" + iDefinition + | ||
783 | File.separatorChar; | ||
784 | m3u8.getListTs().parallelStream().forEachOrdered(ts -> { | ||
785 | //log.info(m3u8.getListTs().indexOf(ts) + ".ts"); | ||
786 | |||
787 | File fileExists = new File(strTsBaseUrl + "ts_" + fillZero(m3u8.getListTs().indexOf(ts), 4) + ".ts"); | ||
788 | if (!fileExists.exists()) { // skip segments that were already downloaded | ||
789 | File fileTs = null; | ||
790 | while (fileTs == null) { | ||
791 | fileTs = HttpConnectionUtil.downloadFile(ts.getStrFilePath(), | ||
792 | strTsBaseUrl, "ts_" + fillZero(m3u8.getListTs().indexOf(ts), 4) + ".ts"); | ||
793 | if (fileTs == null) { | ||
794 | log.info("fail to Download ts: " + ts.getStrFilePath() + ", ready to retry"); | ||
795 | } else { | ||
796 | log.info("ts hls downloaded: " + fileTs.getPath()); | ||
797 | } | ||
798 | } | ||
799 | } | ||
800 | }); | ||
801 | log.info("Video Downloaded: " + lVideoId); | ||
802 | mapVideo.put("ts_path", strTsBaseUrl); | ||
803 | DruidUtil.update(writeConnection, mapVideo, "sohu_video", "id"); | ||
804 | |||
805 | //log.info("start merging..."); | ||
806 | //mergeFiles(new File(mapVideo.get("ts_path").toString()), lVideoId + ".ts"); | ||
807 | |||
808 | } catch (Exception ex) { | ||
809 | ex.printStackTrace(); | ||
810 | log.error("fail to downloadM3U8TS", ex); | ||
811 | } finally { | ||
812 | DruidUtil.close(readConnection); | ||
813 | DruidUtil.close(writeConnection); | ||
814 | } | ||
815 | } | ||
816 | |||
817 | public boolean mergeFiles(File file, String strResultPath) { | ||
818 | File[] fPaths = file.listFiles(); | ||
819 | if (fPaths == null || fPaths.length < 1) { | ||
820 | return false; | ||
821 | } | ||
822 | |||
823 | if (fPaths.length == 1) { | ||
824 | return fPaths[0].renameTo(new File(strResultPath)); | ||
825 | } | ||
826 | for (int i = 0; i < fPaths.length; i++) { | ||
827 | if (!fPaths[i].exists() || !fPaths[i].isFile()) { | ||
828 | return false; | ||
829 | } | ||
830 | } | ||
831 | List<File> listFile = Arrays.asList(fPaths); | ||
832 | //Collections.sort(listFile, new Comparator<File>() { | ||
833 | // @Override | ||
834 | // public int compare(File o1, File o2) { | ||
835 | // if (o1.isDirectory() && o2.isFile()) | ||
836 | // return -1; | ||
837 | // if (o1.isFile() && o2.isDirectory()) | ||
838 | // return 1; | ||
839 | // return Integer.parseInt(o1.getName().split("\\.")[0]) - Integer.parseInt(o2.getName().split("\\.")[0]); | ||
840 | // } | ||
841 | //}); | ||
842 | |||
843 | File fileResult = new File(file.getPath() + File.separatorChar + "full_" + strResultPath); | ||
844 | try { | ||
845 | //FileOutputStream fs = new FileOutputStream(fileResult, true); | ||
846 | //FileChannel fileResultChannel = fs.getChannel(); | ||
847 | //FileInputStream fis; | ||
848 | //for (int i = 0; i < listFile.size(); i++) { | ||
849 | // fis = new FileInputStream(listFile.get(i)); | ||
850 | // FileChannel blk = fis.getChannel(); | ||
851 | // fileResultChannel.transferFrom(blk, fileResultChannel.size(), blk.size()); | ||
852 | // fis.close(); | ||
853 | // blk.close(); | ||
854 | // if (i % 20 == 0) { | ||
855 | // log.info(i + " ts merged"); | ||
856 | // } | ||
857 | //} | ||
858 | //fs.close(); | ||
859 | //fileResultChannel.close(); | ||
860 | |||
861 | fileResult.createNewFile(); | ||
862 | RandomAccessFile raf = new RandomAccessFile(fileResult, "rw"); | ||
863 | raf.setLength(0); | ||
864 | raf.seek(0); | ||
865 | byte[] bytes = new byte[1024]; | ||
866 | int len = -1; | ||
867 | for(int i = 0; i < listFile.size(); i++) { | ||
868 | RandomAccessFile out = new RandomAccessFile(listFile.get(i), "rw"); | ||
869 | while((len = out.read(bytes)) != -1) { | ||
870 | raf.write(bytes, 0, len); | ||
871 | } | ||
872 | out.close(); | ||
873 | if (i != 1 && (i + 1) % 20 == 0) { | ||
874 | log.info((i + 1) + " ts merged"); | ||
875 | } | ||
876 | } | ||
877 | raf.close(); | ||
878 | |||
879 | log.info("merged successfully: " + fileResult.getPath()); | ||
880 | } catch (Exception e) { | ||
881 | e.printStackTrace(); | ||
882 | return false; | ||
883 | } | ||
884 | |||
885 | // for (int i = 0; i < fpaths.length; i ++) { | ||
886 | // fpaths[i].delete(); | ||
887 | // } | ||
888 | |||
889 | return true; | ||
890 | } | ||
891 | |||
892 | private static String fillZero (Integer i, Integer iTotal) { | ||
893 | String strRet = ""; | ||
894 | String str = i.toString(); | ||
895 | if (str.length() >= iTotal) { | ||
896 | strRet = str; | ||
897 | } else { | ||
898 | for (int j = 0; j < iTotal - str.length(); j++) { | ||
899 | strRet += "0"; | ||
900 | } | ||
901 | strRet += str; | ||
902 | } | ||
903 | return strRet; | ||
904 | } | ||
905 | |||
906 | public void metaDataIncrementInjectionByXml(String strDatePath) { | ||
907 | metaDataIncrementInjectionByXml(strDatePath + File.separatorChar, FILM_XML_URL, strDatePath + "09"); | ||
908 | metaDataIncrementInjectionByXml(strDatePath + File.separatorChar, FILM_XML_URL, strDatePath + "21"); | ||
909 | |||
910 | metaDataIncrementInjectionByXml(strDatePath + File.separatorChar, DRAMA_XML_URL, strDatePath + "09"); | ||
911 | metaDataIncrementInjectionByXml(strDatePath + File.separatorChar, DRAMA_XML_URL, strDatePath + "21"); | ||
912 | |||
913 | metaDataIncrementInjectionByXml(strDatePath + File.separatorChar, VARIETY_XML_URL, strDatePath + "09"); | ||
914 | metaDataIncrementInjectionByXml(strDatePath + File.separatorChar, VARIETY_XML_URL, strDatePath + "21"); | ||
915 | |||
916 | metaDataIncrementInjectionByXml(strDatePath + File.separatorChar, DOCUMENTARY_XML_URL, strDatePath + "09"); | ||
917 | metaDataIncrementInjectionByXml(strDatePath + File.separatorChar, DOCUMENTARY_XML_URL, strDatePath + "21"); | ||
918 | |||
919 | metaDataIncrementInjectionByXml(strDatePath + File.separatorChar, CARTOON_XML_URL, strDatePath + "09"); | ||
920 | metaDataIncrementInjectionByXml(strDatePath + File.separatorChar, CARTOON_XML_URL, strDatePath + "21"); | ||
921 | |||
922 | metaDataIncrementInjectionByXml(strDatePath + File.separatorChar, FEEFILM_XML_URL, strDatePath + "09"); | ||
923 | metaDataIncrementInjectionByXml(strDatePath + File.separatorChar, FEEFILM_XML_URL, strDatePath + "21"); | ||
924 | } | ||
925 | |||
926 | private void metaDataIncrementInjectionByXml(String strDatePath, String strURL, String strDate) { | ||
927 | Connection readConnection = null; | ||
928 | Connection writeConnection = null; | ||
929 | try { | ||
930 | readConnection = DruidUtil.getRandomReadConnection(); | ||
931 | writeConnection = DruidUtil.getRandomWriteConnection(); | ||
932 | |||
933 | String strCategory = strURL.substring(strURL.lastIndexOf("/") + 1, strURL.lastIndexOf(".")); | ||
934 | |||
935 | File file = new File(PROJECT_PATH + XML_RELATIVE_PATH + strDatePath + | ||
936 | strCategory + File.separatorChar, strCategory + "_" + strDate + ".xml"); | ||
937 | |||
938 | if (file != null) { | ||
939 | SAXReader reader = new SAXReader(); | ||
940 | Document document = reader.read(file.getPath()); | ||
941 | Element root = document.getRootElement(); | ||
942 | |||
943 | int category_id = 0; | ||
944 | int cate_code = 0; | ||
945 | Date updated = new Date(); | ||
946 | SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); | ||
947 | |||
948 | DruidUtil.beginTransaction(writeConnection); | ||
949 | for (Iterator<?> it = root.elementIterator(); it.hasNext();) { | ||
950 | Element element = (Element) it.next(); | ||
951 | if ("category_id".equals(element.getName())) { | ||
952 | category_id = Integer.parseInt(element.getText()); | ||
953 | } | ||
954 | if ("cate_code".equals(element.getName())) { | ||
955 | cate_code = Integer.parseInt(element.getText()); | ||
956 | } | ||
957 | if ("updated".equals(element.getName())) { | ||
958 | updated = sdf.parse(element.getText()); | ||
959 | } | ||
960 | if ("album".equals(element.getName())) { | ||
961 | Map<String, Object> mapAlbum = new HashMap<>(); | ||
962 | for (Iterator<?> itAlbum = element.elementIterator(); itAlbum.hasNext();) { | ||
963 | Element elAlbum = (Element) itAlbum.next(); | ||
964 | if (elAlbum.getName().equals("publish_time")) { | ||
965 | mapAlbum.put(elAlbum.getName(), elAlbum.getText().equals("") ? null : sdf.parse( | ||
966 | elAlbum.getText())); | ||
967 | } else if (elAlbum.getName().equals("episode_updated") || | ||
968 | elAlbum.getName().equals("episode_total") || elAlbum.getName().equals("year") || | ||
969 | elAlbum.getName().equals("fee") || elAlbum.getName().equals("is_clip") || | ||
970 | elAlbum.getName().equals("is_show") || elAlbum.getName().equals("is_early")) { | ||
971 | mapAlbum.put(elAlbum.getName(), elAlbum.getText().equals("") ? 0 : Integer.parseInt( | ||
972 | elAlbum.getText() | ||
973 | )); | ||
974 | } else if (!elAlbum.getName().equals("videos")) { | ||
975 | mapAlbum.put(elAlbum.getName().equals("update_time") ? "album_update_time" : | ||
976 | elAlbum.getName(), elAlbum.getText().equals("") ? null : elAlbum.getText()); | ||
977 | } else { | ||
978 | for (Iterator<?> itVideos = elAlbum.elementIterator(); itVideos.hasNext();) { | ||
979 | Element elVideos = (Element) itVideos.next(); | ||
980 | if (elVideos.getName().equals("video")) { | ||
981 | Map<String, Object> mapVideo = new HashMap<>(); | ||
982 | for (Iterator<?> itVideo = elVideos.elementIterator(); itVideo.hasNext();) { | ||
983 | Element elVideo = (Element) itVideo.next(); | ||
984 | if (elVideo.getName().equals("issue_time")) { | ||
985 | mapVideo.put(elVideo.getName(), elVideo.getText().equals("") ? null : | ||
986 | sdf.parse(elVideo.getText())); | ||
987 | } else if (elVideo.getName().equals("play_order") || | ||
988 | elVideo.getName().equals("time_length") || | ||
989 | elVideo.getName().equals("is_show") || | ||
990 | elVideo.getName().equals("fee") || | ||
991 | elVideo.getName().equals("tvPlayType")) { | ||
992 | //log.info(elVideo.getName() + " " + mapAlbum.get("name")); | ||
993 | mapVideo.put(elVideo.getName(), elVideo.getText().equals("") ? 0 : | ||
994 | Integer.parseInt(elVideo.getText())); | ||
995 | } else if (elVideo.getName().equals("start_time") || | ||
996 | elVideo.getName().equals("end_time")) { | ||
997 | |||
998 | } else if (!elVideo.getName().equals("logoInfo")) { | ||
999 | mapVideo.put(elVideo.getName().equals("update_time") ? | ||
1000 | "video_update_time" : elVideo.getName(), | ||
1001 | elVideo.getText().equals("") ? null : elVideo.getText()); | ||
1002 | } else { | ||
1003 | for (Iterator<?> itLog = elVideo.elementIterator(); itLog.hasNext();) { | ||
1004 | Element elLog = (Element) itLog.next(); | ||
1005 | if (elLog.getName().equals("dimension")) { | ||
1006 | mapVideo.put(elLog.getName(), elLog.getText()); | ||
1007 | } else { | ||
1008 | mapVideo.put(elLog.getName(), elLog.getText().equals("") ? | ||
1009 | 0 : Integer.parseInt(elLog.getText())); | ||
1010 | } | ||
1011 | } | ||
1012 | } | ||
1013 | } | ||
1014 | mapVideo.put("album_id", mapAlbum.get("album_id")); | ||
1015 | String sql = "SELECT id, is_show, m3u8_path FROM sohu_video WHERE video_id = ?"; | ||
1016 | Map<String, Object> mapExist = DruidUtil.queryUniqueResult(readConnection, | ||
1017 | sql, mapVideo.get("video_id")); | ||
1018 | if (mapExist == null) { | ||
1019 | mapVideo.put("xml_path", file.getPath()); | ||
1020 | DruidUtil.save(writeConnection, mapVideo, "sohu_video"); | ||
1021 | log.info("video saved: " + mapVideo.get("video_id") + " [" + | ||
1022 | mapVideo.get("video_name") + "]"); | ||
1023 | } else { | ||
1024 | mapVideo.put("id", mapExist.get("id")); | ||
1025 | |||
1026 | if (mapExist.get("m3u8_path") != null && | ||
1027 | mapExist.get("m3u8_path").toString().equals("500")) { | ||
1028 | // re-trigger the m3u8 download by clearing m3u8_path | ||
1029 | mapVideo.put("m3u8_path", null); | ||
1030 | } | ||
1031 | |||
1032 | mapVideo.put("xml_path", file.getPath()); | ||
1033 | DruidUtil.update(writeConnection, mapVideo, "sohu_video", "id"); | ||
1034 | log.info("video: " + mapVideo.get("video_id") + | ||
1035 | " already exists, just update"); | ||
1036 | if (mapVideo.get("is_show") != mapExist.get("is_show")) { | ||
1037 | Map<String, Object> mapOnlineInfo = new HashMap<>(); | ||
1038 | mapOnlineInfo.put("video_id", mapVideo.get("video_id")); | ||
1039 | mapOnlineInfo.put("is_online", mapVideo.get("is_show")); | ||
1040 | |||
1041 | DruidUtil.save(writeConnection, mapOnlineInfo, "sohu_online_info"); | ||
1042 | } | ||
1043 | } | ||
1044 | } | ||
1045 | } | ||
1046 | } | ||
1047 | } | ||
1048 | mapAlbum.put("category_id", category_id); | ||
1049 | mapAlbum.put("cate_code", cate_code); | ||
1050 | mapAlbum.put("updated", updated); | ||
1051 | String sql = "SELECT id, episode_updated FROM sohu_album WHERE album_id = ?"; | ||
1052 | Map<String, Object> mapExist = DruidUtil.queryUniqueResult(readConnection, sql, | ||
1053 | mapAlbum.get("album_id")); | ||
1054 | if (mapExist == null) { | ||
1055 | mapAlbum.put("xml_path", file.getPath()); | ||
1056 | DruidUtil.save(writeConnection, mapAlbum, "sohu_album"); | ||
1057 | log.info("album saved: " + mapAlbum.get("album_id") + " [" + | ||
1058 | mapAlbum.get("album_name") + "]"); | ||
1059 | } else { | ||
1060 | mapAlbum.put("id", mapExist.get("id")); | ||
1061 | if ((Integer) mapAlbum.get("episode_updated") < (Integer) mapExist.get("episode_updated")) { | ||
1062 | mapAlbum.put("episode_updated", mapExist.get("episode_updated")); | ||
1063 | } | ||
1064 | |||
1065 | mapAlbum.put("xml_path", file.getPath()); | ||
1066 | DruidUtil.update(writeConnection, mapAlbum, "sohu_album", "id"); | ||
1067 | log.info("Album: " + mapAlbum.get("album_id") + " already exists, just update"); | ||
1068 | |||
1069 | if (mapAlbum.get("is_show") != mapExist.get("is_show")) { | ||
1070 | Map<String, Object> mapOnlineInfo = new HashMap<>(); | ||
1071 | mapOnlineInfo.put("album_id", mapAlbum.get("album_id")); | ||
1072 | mapOnlineInfo.put("is_online", mapAlbum.get("is_show")); | ||
1073 | DruidUtil.save(writeConnection, mapOnlineInfo, "sohu_online_info"); | ||
1074 | } | ||
1075 | } | ||
1076 | } | ||
1077 | } | ||
1078 | DruidUtil.commitTransaction(writeConnection); | ||
1079 | } else { | ||
1080 | log.info("Download xml failed"); | ||
1081 | } | ||
1082 | |||
1083 | } catch (Exception ex) { | ||
1084 | ex.printStackTrace(); | ||
1085 | log.error("failure in metaDataIncrementInjection", ex); | ||
1086 | } finally { | ||
1087 | DruidUtil.close(readConnection); | ||
1088 | DruidUtil.close(writeConnection); | ||
1089 | } | ||
1090 | } | ||
1091 | */ | ||
1092 | |||
1093 | private static void fullMetaDataInjection(String strAllPath, int iTotal) { | ||
1094 | switch (strAllPath) { | ||
1095 | case "film": | ||
1096 | strAllPath = FILM_ALL_XML_URL; | ||
1097 | break; | ||
1098 | case "drama": | ||
1099 | strAllPath = DRAMA_ALL_XML_URL; | ||
1100 | break; | ||
1101 | case "variety": | ||
1102 | strAllPath = VARIETY_ALL_XML_URL; | ||
1103 | break; | ||
1104 | case "documentary": | ||
1105 | strAllPath = DOCUMENTARY_ALL_XML_URL; | ||
1106 | break; | ||
1107 | case "cartoon": | ||
1108 | strAllPath = CARTOON_ALL_XML_URL; | ||
1109 | break; | ||
1110 | case "feeFilm": | ||
1111 | strAllPath = FEEFILM_ALL_XML_URL; | ||
1112 | break; | ||
1113 | default: | ||
1114 | System.out.println("uncorrect sohu category"); | ||
1115 | return; | ||
1116 | } | ||
1117 | |||
1118 | SohuUtil util = new SohuUtil(); | ||
1119 | for (int i = 1; i <= iTotal; i++) { | ||
1120 | String strUrl = strAllPath + i + ".xml"; | ||
1121 | System.out.println("start injection: " + strUrl); | ||
1122 | util.fullMetaDataInjection(strUrl); | ||
1123 | } | ||
1124 | } | ||
1125 | |||
1126 | public static void main(String[] args) { | ||
1127 | //SohuUtil util = new SohuUtil(); | ||
1128 | //util.fullSelfProducedMetaDataUpdate("http://ott.hd.sohu.com/hd/houyi/sohu_film.xml"); | ||
1129 | |||
1130 | if (args.length != 2) { | ||
1131 | System.out.println("wrong number of arguments"); | ||
1132 | } else { | ||
1133 | String strAllPath = args[0]; | ||
1134 | String strTotal = args[1]; | ||
1135 | fullMetaDataInjection(strAllPath, Integer.parseInt(strTotal)); | ||
1136 | } | ||
1137 | |||
1138 | //SohuUtil util = new SohuUtil(); | ||
1139 | //for (int i = 1; i <= 2; i++) { | ||
1140 | // String strUrl = "http://ott.hd.sohu.com/hd/all/fee_film_all_" + i + ".xml"; | ||
1141 | // System.out.println("start injection: " + strUrl); | ||
1142 | // util.fullMetaDataInjection(strUrl); | ||
1143 | //} | ||
1144 | //util.downloadM3U8TS(4372); | ||
1145 | //util.downloadTSHLSByVideoId(4372); | ||
1146 | //util.downloadM3U8AndTSHLS(3000504); | ||
1147 | //util.mergeFiles(new File("E:\\MyWorkSpace\\Eclipse\\hyperion\\hyperion.admin/ts\\20180607\\3000504_21\\"), | ||
1148 | // "3000504.ts"); | ||
1149 | //util.metaDataIncrementInjection(DRAMA_XML_URL); | ||
1150 | |||
1151 | //Connection readConnection = null; | ||
1152 | //try { | ||
1153 | // readConnection = DruidUtil.getRandomReadConnection(); | ||
1154 | // String sql = "SELECT * FROM sohu_video WHERE album_id = ?"; | ||
1155 | // List<Map<String, Object>> list = DruidUtil.queryList(readConnection, sql, 225); | ||
1156 | // File file = new File("225.txt"); | ||
1157 | // if (!file.exists()) { | ||
1158 | // file.createNewFile(); | ||
1159 | // } | ||
1160 | // FileWriter fw = new FileWriter(file.getAbsoluteFile()); | ||
1161 | // BufferedWriter bw = new BufferedWriter(fw); | ||
1162 | // for (Map<String, Object> map : list) { | ||
1163 | // bw.write(map.get("m3u8_path") + "\r\n"); | ||
1164 | // } | ||
1165 | // bw.close(); | ||
1166 | // | ||
1167 | //} catch (Exception ex) { | ||
1168 | // ex.printStackTrace(); | ||
1169 | //} finally { | ||
1170 | // DruidUtil.close(readConnection); | ||
1171 | //} | ||
1172 | |||
1173 | //util.downloadM3U8ByVideoId(683180); | ||
1174 | //util.metaDataIncrementInjection(VARIETY_XML_URL); | ||
1175 | } | ||
1176 | |||
1177 | } |
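For reference, the main entry point above expects two arguments: a category name and a page count. A minimal driver sketch (hypothetical, not part of this commit; it assumes SohuUtil lives in com.topdraw.sohu.utils alongside XmlUtils below, and "drama"/5 are example values — the accepted categories, per the switch in fullMetaDataInjection, are film, drama, variety, documentary, cartoon and feeFilm):

import com.topdraw.sohu.utils.SohuUtil;

public class InjectionDriver {
    public static void main(String[] args) {
        // equivalent to running: java com.topdraw.sohu.utils.SohuUtil drama 5
        SohuUtil.main(new String[] { "drama", "5" });
    }
}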
1 | package com.topdraw.sohu.utils; | ||
2 | |||
3 | import org.dom4j.Document; | ||
4 | import org.dom4j.DocumentHelper; | ||
5 | import org.dom4j.Element; | ||
6 | import org.dom4j.io.OutputFormat; | ||
7 | import org.dom4j.io.XMLWriter; | ||
8 | import org.slf4j.Logger; | ||
9 | import org.slf4j.LoggerFactory; | ||
10 | |||
11 | import java.io.File; | ||
12 | import java.io.FileOutputStream; | ||
13 | import java.text.SimpleDateFormat; | ||
14 | import java.util.Date; | ||
15 | import java.util.UUID; | ||
16 | |||
17 | public class XmlUtils { | ||
18 | private static final Logger logger = LoggerFactory.getLogger(XmlUtils.class); | ||
19 | public static final String FTP_PATH = "/app/proftpd/smp/"; | ||
20 | public static final String XML_RELATIVE_PATH = "notify/xml/"; | ||
21 | |||
22 | public String generateNotifyXml(int iResult, String strErrorDescription) { | ||
23 | String strRet = ""; | ||
24 | Date date = new Date(); | ||
25 | SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd"); | ||
26 | String strDate = sdf.format(date) + "/"; | ||
27 | String strRelativePath = XML_RELATIVE_PATH + strDate + UUID.randomUUID() + ".xml"; | ||
28 | String strPath = FTP_PATH + strRelativePath; | ||
29 | |||
30 | File file = new File(strPath); | ||
31 | if (!file.getParentFile().isDirectory()) { // directory does not exist yet | ||
32 | String[] aPathSegments = strPath.split("/"); | ||
33 | String strWalkThroughPath = "/"; | ||
34 | for (int i = 0; i < aPathSegments.length - 1; i++) { | ||
35 | strWalkThroughPath = strWalkThroughPath + "/" + aPathSegments[i]; | ||
36 | file = new File(strWalkThroughPath); | ||
37 | if (!file.isDirectory()) { | ||
38 | file.mkdir(); | ||
39 | } | ||
40 | } | ||
41 | } | ||
42 | |||
43 | Document document = DocumentHelper.createDocument(); | ||
44 | Element el = document.addElement("xsi:ADI"); | ||
45 | el.addAttribute("xmlns:xsi", "http://www.w3.org/2001/XMLSchema-instance"); | ||
46 | Element elReply = el.addElement("Reply"); | ||
47 | Element elPropertyResult = elReply.addElement("Property"); | ||
48 | Element elPropertyDescription = elReply.addElement("Property"); | ||
49 | elPropertyResult.addAttribute("Name", "Result"); | ||
50 | elPropertyDescription.addAttribute("Name", "Description"); | ||
51 | elPropertyResult.setText(iResult + ""); | ||
52 | elPropertyDescription.setText(strErrorDescription); | ||
53 | |||
54 | OutputFormat format = OutputFormat.createPrettyPrint(); | ||
55 | format.setEncoding("UTF-8"); | ||
56 | format.setNewLineAfterDeclaration(false); | ||
57 | try { | ||
58 | XMLWriter writer = new XMLWriter(new FileOutputStream(strPath), format); | ||
59 | writer.setEscapeText(false); | ||
60 | writer.write(document); | ||
61 | writer.close(); | ||
62 | } catch (Exception ex) { | ||
63 | ex.printStackTrace(); | ||
64 | } | ||
65 | return PropertiesUtil.get("project.ftp_base_url") + strRelativePath; | ||
66 | |||
67 | } | ||
68 | |||
69 | public static void main(String[] args) { | ||
70 | |||
71 | } | ||
72 | } |
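For orientation, the notify file written by generateNotifyXml above comes out roughly like this (a sketch reconstructed from the dom4j calls; the Result and Description values are example inputs):

<?xml version="1.0" encoding="UTF-8"?>
<xsi:ADI xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
  <Reply>
    <Property Name="Result">0</Property>
    <Property Name="Description">success</Property>
  </Reply>
</xsi:ADI>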
src/main/resources/META-INF/MANIFEST.MF
0 → 100644
src/main/resources/app.properties
0 → 100644
1 | #project | ||
2 | project.name = sohu.port | ||
3 | project.version = v0.0.1 | ||
4 | |||
5 | #### DB Configuration ################# | ||
6 | db.read_datasource_count=1 | ||
7 | db.write_datasource_count=1 | ||
8 | |||
9 | #### Global Configuration ############# | ||
10 | |||
11 | Global.CrossDomainAllowOrigin=* | ||
12 | Global.CircumstanceLevel=1 | ||
... | \ No newline at end of file | ... | \ No newline at end of file |
src/main/resources/druid_read_00.properties
0 → 100644
1 | name=druid_read_01 | ||
2 | driverClassName = com.mysql.jdbc.Driver | ||
3 | |||
4 | url = jdbc:mysql://139.196.188.223:3306/hyperion?useUnicode=true&characterEncoding=UTF-8&useOldAliasMetadataBehavior=true&useSSL=false&autoReconnect=true | ||
5 | username = druid_read | ||
6 | password = zyRead! | ||
7 | |||
8 | initialSize = 5 | ||
9 | maxActive = 20 | ||
10 | minIdle = 3 | ||
11 | autoReconnect=true | ||
12 | |||
13 | # Maximum wait time, in milliseconds, when acquiring a connection. Once maxWait is set, | ||
14 | # a fair lock is enabled by default, which lowers concurrent throughput somewhat; | ||
15 | # configure useUnfairLock=true if an unfair lock is preferred. | ||
16 | maxWait = 60000 | ||
17 | |||
18 | removeAbandoned = true | ||
19 | removeAbandonedTimeout = 3600 | ||
20 | |||
21 | # This setting has two meanings: | ||
22 | # 1) the interval at which the Destroy thread checks connections | ||
23 | # 2) the basis for the testWhileIdle check; see the testWhileIdle property for details | ||
24 | timeBetweenEvictionRunsMillis = 60000 | ||
25 | minEvictableIdleTimeMillis = 300000 | ||
26 | validationQuery = SELECT 1 FROM DUAL | ||
27 | # test connections while they sit idle | ||
28 | # if validationQuery is null, testOnBorrow, testOnReturn and testWhileIdle all have no effect | ||
29 | testWhileIdle = true | ||
30 | testOnBorrow = false | ||
31 | testOnReturn = false | ||
32 | poolPreparedStatements = false | ||
33 | maxPoolPreparedStatementPerConnectionSize = 50 | ||
34 | |||
35 | # String-valued property that enables extension plugins by alias. | ||
36 | # Common filters: stat (monitoring/statistics), log4j (logging), wall (SQL-injection defense) | ||
37 | filters = stat | ||
38 | |||
39 | # To enable PSCache this must be configured greater than 0; when it is, | ||
40 | # poolPreparedStatements is automatically switched to true. | ||
41 | maxOpenPreparedStatements = -1 | ||
42 | |||
43 | |||
44 | |||
45 | |||
46 | # SQL executed when a physical connection is initialized | ||
47 | #connectionInitSqls= | ||
48 | |||
49 | # Auto-detected from dbType; drops the connection when the database throws unrecoverable exceptions | ||
50 | #exceptionSorter= | ||
51 |
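The keys above are standard Druid connection-pool properties. DruidUtil itself is not shown in this excerpt; as a sketch of how such a file can feed a pool using stock Druid (an assumption for illustration, not the project's actual wiring):

import com.alibaba.druid.pool.DruidDataSourceFactory;

import javax.sql.DataSource;
import java.io.InputStream;
import java.sql.Connection;
import java.util.Properties;

public class DruidPoolSketch {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        // load the properties file above from the classpath
        try (InputStream in = DruidPoolSketch.class.getResourceAsStream("/druid_read_00.properties")) {
            props.load(in);
        }
        // url, username, password, maxActive, ... map one-to-one onto the pool
        DataSource ds = DruidDataSourceFactory.createDataSource(props);
        try (Connection conn = ds.getConnection()) {
            System.out.println("connection valid: " + conn.isValid(5));
        }
    }
}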
src/main/resources/druid_write_00.properties
0 → 100644
1 | name=druid_write_01 | ||
2 | driverClassName=com.mysql.jdbc.Driver | ||
3 | |||
4 | url = jdbc:mysql://139.196.188.223:3306/hyperion?useUnicode=true&characterEncoding=UTF-8&useOldAliasMetadataBehavior=true&useSSL=false&autoReconnect=true | ||
5 | username = druid_write | ||
6 | password = zyWrite! | ||
7 | |||
8 | initialSize=5 | ||
9 | maxActive=20 | ||
10 | minIdle=3 | ||
11 | autoReconnect=true | ||
12 | |||
13 | # Maximum wait time, in milliseconds, when acquiring a connection. Once maxWait is set, | ||
14 | # a fair lock is enabled by default, which lowers concurrent throughput somewhat; | ||
15 | # configure useUnfairLock=true if an unfair lock is preferred. | ||
16 | maxWait=60000 | ||
17 | |||
18 | removeAbandoned=true | ||
19 | removeAbandonedTimeout=3600 | ||
20 | |||
21 | # This setting has two meanings: | ||
22 | # 1) the interval at which the Destroy thread checks connections | ||
23 | # 2) the basis for the testWhileIdle check; see the testWhileIdle property for details | ||
24 | timeBetweenEvictionRunsMillis=60000 | ||
25 | minEvictableIdleTimeMillis=300000 | ||
26 | validationQuery=SELECT 1 FROM DUAL | ||
27 | # test connections while they sit idle | ||
28 | # if validationQuery is null, testOnBorrow, testOnReturn and testWhileIdle all have no effect | ||
29 | testWhileIdle = true | ||
30 | testOnBorrow=false | ||
31 | testOnReturn=false | ||
32 | poolPreparedStatements=false | ||
33 | maxPoolPreparedStatementPerConnectionSize=50 | ||
34 | |||
35 | # String-valued property that enables extension plugins by alias. | ||
36 | # Common filters: stat (monitoring/statistics), log4j (logging), wall (SQL-injection defense) | ||
37 | filters=stat | ||
38 | |||
39 | # To enable PSCache this must be configured greater than 0; when it is, | ||
40 | # poolPreparedStatements is automatically switched to true. | ||
41 | maxOpenPreparedStatements = -1 | ||
42 | |||
43 | |||
44 | |||
45 | |||
46 | # SQL executed when a physical connection is initialized | ||
47 | #connectionInitSqls= | ||
48 | |||
49 | # Auto-detected from dbType; drops the connection when the database throws unrecoverable exceptions | ||
50 | #exceptionSorter= | ||
51 |
src/main/resources/log4j.xml
0 → 100644
1 | <?xml version="1.0" encoding="UTF-8"?> | ||
2 | <!DOCTYPE log4j:configuration SYSTEM "log4j.dtd"> | ||
3 | |||
4 | |||
5 | <!-- ========================== 自定义输出格式说明=================test=============== --> | ||
6 | <!-- %p 输出优先级,即DEBUG,INFO,WARN,ERROR,FATAL --> | ||
7 | <!-- %r 输出自应用启动到输出该log信息耗费的毫秒数 --> | ||
8 | <!-- %c 输出所属的类目,通常就是所在类的全名 --> | ||
9 | <!-- %t 输出产生该日志事件的线程名 --> | ||
10 | <!-- %n 输出一个回车换行符,Windows平台为“/r/n”,Unix平台为“/n” --> | ||
11 | <!-- %d 输出日志时间点的日期或时间,默认格式为ISO8601,也可以在其后指定格式,比如:%d{yyy MMM dd HH:mm:ss,SSS},输出类似:2002年10月18日 | ||
12 | 22:10:28,921 --> | ||
13 | <!-- %l 输出日志事件的发生位置,包括类目名、发生的线程,以及在代码中的行数。举例:Testlog4.main(TestLog4.java:10) --> | ||
14 | <!-- ========================================================================== --> | ||
15 | <!-- ========================== 输出方式说明================================ --> | ||
16 | <!-- Log4j提供的appender有以下几种: --> | ||
17 | <!-- org.apache.log4j.ConsoleAppender(控制台), --> | ||
18 | <!-- org.apache.log4j.FileAppender(文件), --> | ||
19 | <!-- org.apache.log4j.DailyRollingFileAppender(每天产生一个日志文件), --> | ||
20 | <!-- org.apache.log4j.RollingFileAppender(文件大小到达指定尺寸的时候产生一个新的文件), --> | ||
21 | <!-- org.apache.log4j.WriterAppender(将日志信息以流格式发送到任意指定的地方) --> | ||
22 | <!-- ========================================================================== --> | ||
23 | |||
24 | |||
25 | <log4j:configuration xmlns:log4j='http://jakarta.apache.org/log4j/'> | ||
26 | <appender name="cAppender" class="org.apache.log4j.ConsoleAppender"> | ||
27 | <layout class="org.apache.log4j.PatternLayout"> | ||
28 | <param name="ConversionPattern" value="[%d{dd HH:mm:ss,SSS} %-5p] [%t] %c{1}:%m%n" /> | ||
29 | </layout> | ||
30 | <!--过滤器设置输出的级别 --> | ||
31 | <filter class="org.apache.log4j.varia.LevelRangeFilter"> | ||
32 | <param name="levelMin" value="debug" /> | ||
33 | <param name="levelMax" value="error" /> | ||
34 | <param name="AcceptOnMatch" value="true" /> | ||
35 | </filter> | ||
36 | </appender> | ||
37 | |||
38 | <!-- <appender name="rfAppender" class="org.apache.log4j.RollingFileAppender"> | ||
39 | <param name="File" value="./log/sohu.port.log" /> <param name="Append" | ||
40 | value="true" /> <param name="MaxBackupIndex" value="10" /> <layout class="org.apache.log4j.PatternLayout"> | ||
41 | <param name="ConversionPattern" value="%p (%c:%L)- %m%n" /> </layout> </appender> --> | ||
42 | |||
43 | <appender name="drfInfoAppender" class="org.apache.log4j.DailyRollingFileAppender"> | ||
44 | <param name="File" value="${catalina.base}/logs/sohu.port.info" /> | ||
45 | <param name="DatePattern" value="'.'yyyy-MM-dd'.log'" /> | ||
46 | <layout class="org.apache.log4j.PatternLayout"> | ||
47 | <param name="ConversionPattern" | ||
48 | value="[%d{yyyy-MM-dd HH:mm:ss.SSS} %-5p] [%t] %c{1}:%m%n" /> | ||
49 | </layout> | ||
50 | <filter class="org.apache.log4j.varia.LevelRangeFilter"> | ||
51 | <param name="levelMin" value="info" /> | ||
52 | <param name="levelMax" value="error" /> | ||
53 | <param name="AcceptOnMatch" value="true" /> | ||
54 | </filter> | ||
55 | </appender> | ||
56 | |||
57 | <appender name="drfErrorAppender" class="org.apache.log4j.DailyRollingFileAppender"> | ||
58 | <param name="File" value="${catalina.base}/logs/sohu.port.error" /> | ||
59 | <param name="DatePattern" value="'.'yyyy-MM-dd'.log'" /> | ||
60 | <layout class="org.apache.log4j.PatternLayout"> | ||
61 | <param name="ConversionPattern" | ||
62 | value="[%d{yyyy-MM-dd HH:mm:ss.SSS} %-5p] [%t] %c{1}:%m%n" /> | ||
63 | </layout> | ||
64 | <filter class="org.apache.log4j.varia.LevelRangeFilter"> | ||
65 | <param name="levelMin" value="warning" /> | ||
66 | <param name="levelMax" value="error" /> | ||
67 | <param name="AcceptOnMatch" value="true" /> | ||
68 | </filter> | ||
69 | |||
70 | </appender> | ||
71 | |||
72 | |||
73 | |||
74 | <!-- 指定logger的设置,additivity指示是否遵循缺省的继承机制 <logger name="test.TaskCommandHandler" | ||
75 | additivity="false"> <priority value ="info"/> <appender-ref ref="cAppender" | ||
76 | /> <appender-ref ref="drfAppender" /> </logger> --> | ||
77 | |||
78 | <!-- 根logger的设置 --> | ||
79 | <root> | ||
80 | <priority value="info" /> | ||
81 | <appender-ref ref="cAppender" /> | ||
82 | <appender-ref ref="drfInfoAppender" /> | ||
83 | <!--<appender-ref ref="drfErrorAppender"/> --> | ||
84 | |||
85 | </root> | ||
86 | </log4j:configuration> | ||
... | \ No newline at end of file | ... | \ No newline at end of file |
src/main/resources/quartz.properties
0 → 100644
1 | org.quartz.scheduler.instanceName = Sohu Interface Scheduler | ||
2 | org.quartz.scheduler.instanceId = 01 | ||
3 | |||
4 | org.quartz.threadPool.class = org.quartz.simpl.SimpleThreadPool | ||
5 | org.quartz.threadPool.threadCount = 10 | ||
6 | |||
7 | org.quartz.jobStore.class = org.quartz.simpl.RAMJobStore | ||
8 | |||
9 | org.quartz.plugin.jobInitializer.class = org.quartz.plugins.xml.XMLSchedulingDataProcessorPlugin | ||
10 | org.quartz.plugin.jobInitializer.fileNames = quartz_jobs.xml | ||
11 | org.quartz.plugin.jobInitializer.failOnFileNotFound = true | ||
12 | |||
13 | org.quartz.scheduler.jmx.export = true | ||
... | \ No newline at end of file | ... | \ No newline at end of file |
src/main/resources/quartz_jobs.xml
0 → 100644
1 | <?xml version='1.0' encoding='utf-8'?> | ||
2 | <job-scheduling-data | ||
3 | xmlns="http://www.quartz-scheduler.org/xml/JobSchedulingData" | ||
4 | xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" | ||
5 | xsi:schemaLocation="http://www.quartz-scheduler.org/xml/JobSchedulingData http://www.quartz-scheduler.org/xml/job_scheduling_data_2_0.xsd" | ||
6 | version="2.0"> | ||
7 | <schedule> | ||
8 | <job> | ||
9 | <name>M3U8Job</name> | ||
10 | <group>M3U8JobGroup</group> | ||
11 | <description>M3U8Job</description> | ||
12 | <job-class>com.topdraw.sohu.port.job.M3U8Job</job-class> | ||
13 | </job> | ||
14 | <trigger> | ||
15 | <simple> | ||
16 | <name>M3U8JobTrigger</name> | ||
17 | <group>M3U8JobGroup</group> | ||
18 | <job-name>M3U8Job</job-name> | ||
19 | <job-group>M3U8JobGroup</job-group> | ||
20 | <start-time-seconds-in-future>5</start-time-seconds-in-future> | ||
21 | <repeat-count>-1</repeat-count> | ||
22 | <repeat-interval>30000</repeat-interval> | ||
23 | </simple> | ||
24 | </trigger> | ||
25 | |||
26 | <job> | ||
27 | <name>MetaDataJob</name> | ||
28 | <group>MetaDataJobGroup</group> | ||
29 | <description>MetaDataJob</description> | ||
30 | <job-class>com.topdraw.sohu.port.job.MetaDataJob</job-class> | ||
31 | </job> | ||
32 | <trigger> | ||
33 | <cron> | ||
34 | <name>MetaDataJobTrigger</name> | ||
35 | <group>MetaDataJobGroup</group> | ||
36 | <job-name>MetaDataJob</job-name> | ||
37 | <job-group>MetaDataJobGroup</job-group> | ||
38 | <cron-expression>0 55 * * * ?</cron-expression> | ||
39 | </cron> | ||
40 | </trigger> | ||
41 | </schedule> | ||
42 | </job-scheduling-data> | ||
... | \ No newline at end of file | ... | \ No newline at end of file |
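Two schedules are declared above: the simple trigger starts M3U8Job 5 seconds after boot and repeats every 30 seconds indefinitely (repeat-count -1, repeat-interval in milliseconds), and the cron expression 0 55 * * * ? fires MetaDataJob at minute 55 of every hour. The job classes are not part of this excerpt; the shape Quartz requires looks like this (a sketch — the real body of com.topdraw.sohu.port.job.M3U8Job is an assumption):

package com.topdraw.sohu.port.job;

import org.quartz.Job;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;

public class M3U8Job implements Job {
    @Override
    public void execute(JobExecutionContext context) throws JobExecutionException {
        // assumption: scan sohu_video for rows whose m3u8_path is still NULL
        // and fetch each one, e.g. via SohuUtil#downloadM3U8ByVideoId(long)
    }
}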
src/main/webapp/WEB-INF/web.xml
0 → 100644
target/classes/META-INF/MANIFEST.MF
0 → 100644
target/classes/app.properties
0 → 100644
1 | #project | ||
2 | project.name = sohu.port | ||
3 | project.version = v0.0.1 | ||
4 | |||
5 | #### DB Configuration ################# | ||
6 | db.read_datasource_count=1 | ||
7 | db.write_datasource_count=1 | ||
8 | |||
9 | #### Global Configuration ############# | ||
10 | |||
11 | Global.CrossDomainAllowOrigin=* | ||
12 | Global.CircumstanceLevel=1 | ||
... | \ No newline at end of file | ... | \ No newline at end of file |
No preview for this file type
No preview for this file type
No preview for this file type
No preview for this file type
No preview for this file type
No preview for this file type
No preview for this file type
No preview for this file type
No preview for this file type
No preview for this file type
No preview for this file type
No preview for this file type
No preview for this file type
target/classes/druid_read_00.properties
0 → 100644
1 | name=druid_read_01 | ||
2 | driverClassName = com.mysql.jdbc.Driver | ||
3 | |||
4 | url = jdbc:mysql://139.196.188.223:3306/hyperion?useUnicode=true&characterEncoding=UTF-8&useOldAliasMetadataBehavior=true&useSSL=false&autoReconnect=true | ||
5 | username = druid_read | ||
6 | password = zyRead! | ||
7 | |||
8 | initialSize = 5 | ||
9 | maxActive = 20 | ||
10 | minIdle = 3 | ||
11 | autoReconnect=true | ||
12 | |||
13 | # Maximum wait time, in milliseconds, when acquiring a connection. Once maxWait is set, | ||
14 | # a fair lock is enabled by default, which lowers concurrent throughput somewhat; | ||
15 | # configure useUnfairLock=true if an unfair lock is preferred. | ||
16 | maxWait = 60000 | ||
17 | |||
18 | removeAbandoned = true | ||
19 | removeAbandonedTimeout = 3600 | ||
20 | |||
21 | # This setting has two meanings: | ||
22 | # 1) the interval at which the Destroy thread checks connections | ||
23 | # 2) the basis for the testWhileIdle check; see the testWhileIdle property for details | ||
24 | timeBetweenEvictionRunsMillis = 60000 | ||
25 | minEvictableIdleTimeMillis = 300000 | ||
26 | validationQuery = SELECT 1 FROM DUAL | ||
27 | # test connections while they sit idle | ||
28 | # if validationQuery is null, testOnBorrow, testOnReturn and testWhileIdle all have no effect | ||
29 | testWhileIdle = true | ||
30 | testOnBorrow = false | ||
31 | testOnReturn = false | ||
32 | poolPreparedStatements = false | ||
33 | maxPoolPreparedStatementPerConnectionSize = 50 | ||
34 | |||
35 | # String-valued property that enables extension plugins by alias. | ||
36 | # Common filters: stat (monitoring/statistics), log4j (logging), wall (SQL-injection defense) | ||
37 | filters = stat | ||
38 | |||
39 | # To enable PSCache this must be configured greater than 0; when it is, | ||
40 | # poolPreparedStatements is automatically switched to true. | ||
41 | maxOpenPreparedStatements = -1 | ||
42 | |||
43 | |||
44 | |||
45 | |||
46 | # SQL executed when a physical connection is initialized | ||
47 | #connectionInitSqls= | ||
48 | |||
49 | # Auto-detected from dbType; drops the connection when the database throws unrecoverable exceptions | ||
50 | #exceptionSorter= | ||
51 |
target/classes/druid_write_00.properties
0 → 100644
1 | name=druid_write_01 | ||
2 | driverClassName=com.mysql.jdbc.Driver | ||
3 | |||
4 | url = jdbc:mysql://139.196.188.223:3306/hyperion?useUnicode=true&characterEncoding=UTF-8&useOldAliasMetadataBehavior=true&useSSL=false&autoReconnect=true | ||
5 | username = druid_write | ||
6 | password = zyWrite! | ||
7 | |||
8 | initialSize=5 | ||
9 | maxActive=20 | ||
10 | minIdle=3 | ||
11 | autoReconnect=true | ||
12 | |||
13 | # Maximum wait time, in milliseconds, when acquiring a connection. Once maxWait is set, | ||
14 | # a fair lock is enabled by default, which lowers concurrent throughput somewhat; | ||
15 | # configure useUnfairLock=true if an unfair lock is preferred. | ||
16 | maxWait=60000 | ||
17 | |||
18 | removeAbandoned=true | ||
19 | removeAbandonedTimeout=3600 | ||
20 | |||
21 | # This setting has two meanings: | ||
22 | # 1) the interval at which the Destroy thread checks connections | ||
23 | # 2) the basis for the testWhileIdle check; see the testWhileIdle property for details | ||
24 | timeBetweenEvictionRunsMillis=60000 | ||
25 | minEvictableIdleTimeMillis=300000 | ||
26 | validationQuery=SELECT 1 FROM DUAL | ||
27 | # test connections while they sit idle | ||
28 | # if validationQuery is null, testOnBorrow, testOnReturn and testWhileIdle all have no effect | ||
29 | testWhileIdle = true | ||
30 | testOnBorrow=false | ||
31 | testOnReturn=false | ||
32 | poolPreparedStatements=false | ||
33 | maxPoolPreparedStatementPerConnectionSize=50 | ||
34 | |||
35 | # String-valued property that enables extension plugins by alias. | ||
36 | # Common filters: stat (monitoring/statistics), log4j (logging), wall (SQL-injection defense) | ||
37 | filters=stat | ||
38 | |||
39 | # To enable PSCache this must be configured greater than 0; when it is, | ||
40 | # poolPreparedStatements is automatically switched to true. | ||
41 | maxOpenPreparedStatements = -1 | ||
42 | |||
43 | |||
44 | |||
45 | |||
46 | # SQL executed when a physical connection is initialized | ||
47 | #connectionInitSqls= | ||
48 | |||
49 | # Auto-detected from dbType; drops the connection when the database throws unrecoverable exceptions | ||
50 | #exceptionSorter= | ||
51 |
target/classes/log4j.xml
0 → 100644
1 | <?xml version="1.0" encoding="UTF-8"?> | ||
2 | <!DOCTYPE log4j:configuration SYSTEM "log4j.dtd"> | ||
3 | |||
4 | |||
5 | <!-- ========================== 自定义输出格式说明=================test=============== --> | ||
6 | <!-- %p 输出优先级,即DEBUG,INFO,WARN,ERROR,FATAL --> | ||
7 | <!-- %r 输出自应用启动到输出该log信息耗费的毫秒数 --> | ||
8 | <!-- %c 输出所属的类目,通常就是所在类的全名 --> | ||
9 | <!-- %t 输出产生该日志事件的线程名 --> | ||
10 | <!-- %n 输出一个回车换行符,Windows平台为“/r/n”,Unix平台为“/n” --> | ||
11 | <!-- %d 输出日志时间点的日期或时间,默认格式为ISO8601,也可以在其后指定格式,比如:%d{yyy MMM dd HH:mm:ss,SSS},输出类似:2002年10月18日 | ||
12 | 22:10:28,921 --> | ||
13 | <!-- %l 输出日志事件的发生位置,包括类目名、发生的线程,以及在代码中的行数。举例:Testlog4.main(TestLog4.java:10) --> | ||
14 | <!-- ========================================================================== --> | ||
15 | <!-- ========================== 输出方式说明================================ --> | ||
16 | <!-- Log4j提供的appender有以下几种: --> | ||
17 | <!-- org.apache.log4j.ConsoleAppender(控制台), --> | ||
18 | <!-- org.apache.log4j.FileAppender(文件), --> | ||
19 | <!-- org.apache.log4j.DailyRollingFileAppender(每天产生一个日志文件), --> | ||
20 | <!-- org.apache.log4j.RollingFileAppender(文件大小到达指定尺寸的时候产生一个新的文件), --> | ||
21 | <!-- org.apache.log4j.WriterAppender(将日志信息以流格式发送到任意指定的地方) --> | ||
22 | <!-- ========================================================================== --> | ||
23 | |||
24 | |||
25 | <log4j:configuration xmlns:log4j='http://jakarta.apache.org/log4j/'> | ||
26 | <appender name="cAppender" class="org.apache.log4j.ConsoleAppender"> | ||
27 | <layout class="org.apache.log4j.PatternLayout"> | ||
28 | <param name="ConversionPattern" value="[%d{dd HH:mm:ss,SSS} %-5p] [%t] %c{1}:%m%n" /> | ||
29 | </layout> | ||
30 | <!--过滤器设置输出的级别 --> | ||
31 | <filter class="org.apache.log4j.varia.LevelRangeFilter"> | ||
32 | <param name="levelMin" value="debug" /> | ||
33 | <param name="levelMax" value="error" /> | ||
34 | <param name="AcceptOnMatch" value="true" /> | ||
35 | </filter> | ||
36 | </appender> | ||
37 | |||
38 | <!-- <appender name="rfAppender" class="org.apache.log4j.RollingFileAppender"> | ||
39 | <param name="File" value="./log/sohu.port.log" /> <param name="Append" | ||
40 | value="true" /> <param name="MaxBackupIndex" value="10" /> <layout class="org.apache.log4j.PatternLayout"> | ||
41 | <param name="ConversionPattern" value="%p (%c:%L)- %m%n" /> </layout> </appender> --> | ||
42 | |||
43 | <appender name="drfInfoAppender" class="org.apache.log4j.DailyRollingFileAppender"> | ||
44 | <param name="File" value="${catalina.base}/logs/sohu.port.info" /> | ||
45 | <param name="DatePattern" value="'.'yyyy-MM-dd'.log'" /> | ||
46 | <layout class="org.apache.log4j.PatternLayout"> | ||
47 | <param name="ConversionPattern" | ||
48 | value="[%d{yyyy-MM-dd HH:mm:ss.SSS} %-5p] [%t] %c{1}:%m%n" /> | ||
49 | </layout> | ||
50 | <filter class="org.apache.log4j.varia.LevelRangeFilter"> | ||
51 | <param name="levelMin" value="info" /> | ||
52 | <param name="levelMax" value="error" /> | ||
53 | <param name="AcceptOnMatch" value="true" /> | ||
54 | </filter> | ||
55 | </appender> | ||
56 | |||
57 | <appender name="drfErrorAppender" class="org.apache.log4j.DailyRollingFileAppender"> | ||
58 | <param name="File" value="${catalina.base}/logs/sohu.port.error" /> | ||
59 | <param name="DatePattern" value="'.'yyyy-MM-dd'.log'" /> | ||
60 | <layout class="org.apache.log4j.PatternLayout"> | ||
61 | <param name="ConversionPattern" | ||
62 | value="[%d{yyyy-MM-dd HH:mm:ss.SSS} %-5p] [%t] %c{1}:%m%n" /> | ||
63 | </layout> | ||
64 | <filter class="org.apache.log4j.varia.LevelRangeFilter"> | ||
65 | <param name="levelMin" value="warning" /> | ||
66 | <param name="levelMax" value="error" /> | ||
67 | <param name="AcceptOnMatch" value="true" /> | ||
68 | </filter> | ||
69 | |||
70 | </appender> | ||
71 | |||
72 | |||
73 | |||
74 | <!-- 指定logger的设置,additivity指示是否遵循缺省的继承机制 <logger name="test.TaskCommandHandler" | ||
75 | additivity="false"> <priority value ="info"/> <appender-ref ref="cAppender" | ||
76 | /> <appender-ref ref="drfAppender" /> </logger> --> | ||
77 | |||
78 | <!-- 根logger的设置 --> | ||
79 | <root> | ||
80 | <priority value="info" /> | ||
81 | <appender-ref ref="cAppender" /> | ||
82 | <appender-ref ref="drfInfoAppender" /> | ||
83 | <!--<appender-ref ref="drfErrorAppender"/> --> | ||
84 | |||
85 | </root> | ||
86 | </log4j:configuration> | ||
... | \ No newline at end of file | ... | \ No newline at end of file |
target/classes/quartz.properties
0 → 100644
1 | org.quartz.scheduler.instanceName = Sohu Interface Scheduler | ||
2 | org.quartz.scheduler.instanceId = 01 | ||
3 | |||
4 | org.quartz.threadPool.class = org.quartz.simpl.SimpleThreadPool | ||
5 | org.quartz.threadPool.threadCount = 10 | ||
6 | |||
7 | org.quartz.jobStore.class = org.quartz.simpl.RAMJobStore | ||
8 | |||
9 | org.quartz.plugin.jobInitializer.class = org.quartz.plugins.xml.XMLSchedulingDataProcessorPlugin | ||
10 | org.quartz.plugin.jobInitializer.fileNames = quartz_jobs.xml | ||
11 | org.quartz.plugin.jobInitializer.failOnFileNotFound = true | ||
12 | |||
13 | org.quartz.scheduler.jmx.export = true | ||
... | \ No newline at end of file
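Nothing in this diff shows how the scheduler is actually started, so the following is an assumption rather than the project's bootstrap code; StdSchedulerFactory loads quartz.properties from the classpath by default, and the XMLSchedulingDataProcessorPlugin configured above then reads quartz_jobs.xml:

import org.quartz.Scheduler;
import org.quartz.SchedulerException;
import org.quartz.impl.StdSchedulerFactory;

// Sketch: boot Quartz from the quartz.properties above.
public class SchedulerBootstrap {
    public static void main(String[] args) throws SchedulerException {
        // getDefaultScheduler() reads quartz.properties from the classpath;
        // the jobInitializer plugin then loads quartz_jobs.xml (failing
        // fast if it is missing, per failOnFileNotFound=true).
        Scheduler scheduler = StdSchedulerFactory.getDefaultScheduler();
        scheduler.start(); // 10 worker threads, non-persistent RAMJobStore
    }
}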
target/classes/quartz_jobs.xml
0 → 100644
1 | <?xml version='1.0' encoding='utf-8'?> | ||
2 | <job-scheduling-data | ||
3 | xmlns="http://www.quartz-scheduler.org/xml/JobSchedulingData" | ||
4 | xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" | ||
5 | xsi:schemaLocation="http://www.quartz-scheduler.org/xml/JobSchedulingData http://www.quartz-scheduler.org/xml/job_scheduling_data_2_0.xsd" | ||
6 | version="2.0"> | ||
7 | <schedule> | ||
8 | <job> | ||
9 | <name>M3U8Job</name> | ||
10 | <group>M3U8JobGroup</group> | ||
11 | <description>M3U8Job</description> | ||
12 | <job-class>com.topdraw.sohu.port.job.M3U8Job</job-class> | ||
13 | </job> | ||
14 | <trigger> | ||
15 | <simple> | ||
16 | <name>M3U8JobTrigger</name> | ||
17 | <group>M3U8JobGroup</group> | ||
18 | <job-name>M3U8Job</job-name> | ||
19 | <job-group>M3U8JobGroup</job-group> | ||
20 | <start-time-seconds-in-future>5</start-time-seconds-in-future> | ||
21 | <repeat-count>-1</repeat-count> | ||
22 | <repeat-interval>30000</repeat-interval> | ||
23 | </simple> | ||
24 | </trigger> | ||
25 | |||
26 | <job> | ||
27 | <name>MetaDataJob</name> | ||
28 | <group>MetaDataJobGroup</group> | ||
29 | <description>MetaDataJob</description> | ||
30 | <job-class>com.topdraw.sohu.port.job.MetaDataJob</job-class> | ||
31 | </job> | ||
32 | <trigger> | ||
33 | <cron> | ||
34 | <name>MetaDataJobTrigger</name> | ||
35 | <group>MetaDataJobGroup</group> | ||
36 | <job-name>MetaDataJob</job-name> | ||
37 | <job-group>MetaDataJobGroup</job-group> | ||
38 | <cron-expression>0 55 * * * ?</cron-expression> | ||
39 | </cron> | ||
40 | </trigger> | ||
41 | </schedule> | ||
42 | </job-scheduling-data> | ||
... | \ No newline at end of file
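The file registers two jobs: M3U8Job starts 5 seconds after scheduler startup and then repeats every 30 seconds indefinitely (repeat-interval is in milliseconds; a repeat-count of -1 means unbounded), while the cron expression 0 55 * * * ? fires MetaDataJob at minute 55 of every hour. The referenced job classes live elsewhere in this commit; for orientation, a job wired this way is simply an org.quartz.Job implementation. The skeleton below is a placeholder under a different name, not the committed code:

import org.quartz.Job;
import org.quartz.JobExecutionContext;
import org.quartz.JobExecutionException;

// Placeholder skeleton for a job schedulable via quartz_jobs.xml; the
// real com.topdraw.sohu.port.job.M3U8Job is defined elsewhere in this
// commit and must keep that exact class name to match the XML.
public class M3U8JobSkeleton implements Job {
    @Override
    public void execute(JobExecutionContext context) throws JobExecutionException {
        // the committed class would prepare/refresh the m3u8 playlists here
    }
}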
target/port.war
0 → 100644
No preview for this file type
target/port/META-INF/MANIFEST.MF
0 → 100644
target/port/WEB-INF/classes/app.properties
0 → 100644
1 | #project | ||
2 | project.name = sohu.port | ||
3 | project.version = v0.0.1 | ||
4 | |||
5 | #### DB Configuration ################# | ||
6 | db.read_datasource_count=1 | ||
7 | db.write_datasource_count=1 | ||
8 | |||
9 | #### Global Configuration ############# | ||
10 | |||
11 | Global.CrossDomainAllowOrigin=* | ||
12 | Global.CircumstanceLevel=1 | ||
... | \ No newline at end of file
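How these keys are consumed is not visible in the diff (presumably via the org.afflatus utility dependency); as a plain-Java illustration only, the file can be read like so:

import java.io.IOException;
import java.io.InputStream;
import java.util.Properties;

// Illustration only: read app.properties from the classpath.
public class AppConfigDemo {
    public static void main(String[] args) throws IOException {
        Properties props = new Properties();
        try (InputStream in = AppConfigDemo.class.getResourceAsStream("/app.properties")) {
            props.load(in); // NullPointerException here if the file is absent
        }
        System.out.println(props.getProperty("project.name") + " "
                + props.getProperty("project.version")
                + ", read datasources: " + props.getProperty("db.read_datasource_count"));
    }
}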
1 | name=druid_read_01 | ||
2 | driverClassName = com.mysql.jdbc.Driver | ||
3 | |||
4 | url = jdbc:mysql://139.196.188.223:3306/hyperion?useUnicode=true&characterEncoding=UTF-8&useOldAliasMetadataBehavior=true&useSSL=false&autoReconnect=true | ||
5 | username = druid_read | ||
6 | password = zyRead! | ||
7 | |||
8 | initialSize = 5 | ||
9 | maxActive = 20 | ||
10 | minIdle = 3 | ||
11 | autoReconnect=true | ||
12 | |||
13 | # Maximum wait, in milliseconds, when acquiring a connection. Once maxWait is set, | ||
14 | # a fair lock is used by default, which lowers concurrency throughput; | ||
15 | # set useUnfairLock=true to switch to an unfair lock if needed. | ||
16 | maxWait = 60000 | ||
17 | |||
18 | removeAbandoned = true | ||
19 | removeAbandonedTimeout = 3600 | ||
20 | |||
21 | # This setting has two meanings: | ||
22 | # 1) the interval at which the Destroy thread checks connections | ||
23 | # 2) the reference interval for testWhileIdle; see that property for details | ||
24 | timeBetweenEvictionRunsMillis = 60000 | ||
25 | minEvictableIdleTimeMillis = 300000 | ||
26 | validationQuery = SELECT 1 FROM DUAL | ||
27 | # Validate connections while idle. | ||
28 | # If validationQuery is null, testOnBorrow, testOnReturn and testWhileIdle have no effect. | ||
29 | testWhileIdle = true | ||
30 | testOnBorrow = false | ||
31 | testOnReturn = false | ||
32 | poolPreparedStatements = false | ||
33 | maxPoolPreparedStatementPerConnectionSize = 50 | ||
34 | |||
35 | # String-valued; extension plugins are configured by alias. | ||
36 | # Common filters: stat (monitoring statistics), log4j (logging), wall (SQL-injection defense) | ||
37 | filters = stat | ||
38 | |||
39 | # To enable PSCache this must be greater than 0; when it is, | ||
40 | # poolPreparedStatements is automatically switched to true. | ||
41 | maxOpenPreparedStatements = -1 | ||
42 | |||
43 | |||
44 | |||
45 | |||
46 | # SQL executed when a physical connection is initialized | ||
47 | #connectionInitSqls= | ||
48 | |||
49 | # Auto-detected from dbType; drops a connection when the database throws an unrecoverable exception | ||
50 | #exceptionSorter= | ||
51 |
1 | name=druid_write_01 | ||
2 | driverClassName=com.mysql.jdbc.Driver | ||
3 | |||
4 | url = jdbc:mysql://139.196.188.223:3306/hyperion?useUnicode=true&characterEncoding=UTF-8&useOldAliasMetadataBehavior=true&useSSL=false&autoReconnect=true | ||
5 | username = druid_write | ||
6 | password = zyWrite! | ||
7 | |||
8 | initialSize=5 | ||
9 | maxActive=20 | ||
10 | minIdle=3 | ||
11 | autoReconnect=true | ||
12 | |||
13 | # Maximum wait, in milliseconds, when acquiring a connection. Once maxWait is set, | ||
14 | # a fair lock is used by default, which lowers concurrency throughput; | ||
15 | # set useUnfairLock=true to switch to an unfair lock if needed. | ||
16 | maxWait=60000 | ||
17 | |||
18 | removeAbandoned=true | ||
19 | removeAbandonedTimeout=3600 | ||
20 | |||
21 | # This setting has two meanings: | ||
22 | # 1) the interval at which the Destroy thread checks connections | ||
23 | # 2) the reference interval for testWhileIdle; see that property for details | ||
24 | timeBetweenEvictionRunsMillis=60000 | ||
25 | minEvictableIdleTimeMillis=300000 | ||
26 | validationQuery=SELECT 1 FROM DUAL | ||
27 | # Validate connections while idle. | ||
28 | # If validationQuery is null, testOnBorrow, testOnReturn and testWhileIdle have no effect. | ||
29 | testWhileIdle = true | ||
30 | testOnBorrow=false | ||
31 | testOnReturn=false | ||
32 | poolPreparedStatements=false | ||
33 | maxPoolPreparedStatementPerConnectionSize=50 | ||
34 | |||
35 | # String-valued; extension plugins are configured by alias. | ||
36 | # Common filters: stat (monitoring statistics), log4j (logging), wall (SQL-injection defense) | ||
37 | filters=stat | ||
38 | |||
39 | # To enable PSCache this must be greater than 0; when it is, | ||
40 | # poolPreparedStatements is automatically switched to true. | ||
41 | maxOpenPreparedStatements = -1 | ||
42 | |||
43 | |||
44 | |||
45 | |||
46 | #物理连接初始化的时候执行的sql | ||
47 | #connectionInitSqls= | ||
48 | |||
49 | # Auto-detected from dbType; drops a connection when the database throws an unrecoverable exception | ||
50 | #exceptionSorter= | ||
51 |
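These two property files (their file names are not visible in this diff) are standard Druid pool configurations for a read and a write account against the same MySQL instance. A sketch of turning one into a pooled DataSource with Druid's stock factory; the resource path below is an assumption, since the diff does not show how the project actually loads these files:

import java.io.InputStream;
import java.sql.Connection;
import java.util.Properties;
import javax.sql.DataSource;

import com.alibaba.druid.pool.DruidDataSourceFactory;

// Sketch: build a pooled DataSource from one of the Druid property
// files above. The classpath location "/druid_read_01.properties"
// is hypothetical.
public class ReadPoolDemo {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        try (InputStream in = ReadPoolDemo.class.getResourceAsStream("/druid_read_01.properties")) {
            props.load(in);
        }
        // createDataSource maps keys such as url/username/initialSize/filters
        // directly onto the pool configuration.
        DataSource ds = DruidDataSourceFactory.createDataSource(props);
        try (Connection conn = ds.getConnection()) {
            System.out.println("connected: " + !conn.isClosed());
        }
    }
}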
target/port/WEB-INF/classes/log4j.xml
0 → 100644
1 | <?xml version="1.0" encoding="UTF-8"?> | ||
2 | <!DOCTYPE log4j:configuration SYSTEM "log4j.dtd"> | ||
3 | |||
4 | |||
5 | <!-- ========================== Custom conversion-pattern reference ========================== --> | ||
6 | <!-- %p  priority of the log event: DEBUG, INFO, WARN, ERROR, FATAL --> | ||
7 | <!-- %r  milliseconds elapsed from application start to this log event --> | ||
8 | <!-- %c  category of the event, normally the fully qualified class name --> | ||
9 | <!-- %t  name of the thread that produced the event --> | ||
10 | <!-- %n  platform-dependent line separator: "\r\n" on Windows, "\n" on Unix --> | ||
11 | <!-- %d  date/time of the event, ISO8601 by default; a pattern may follow, e.g. %d{yyyy MMM dd HH:mm:ss,SSS}, | ||
12 | which prints something like: 18 Oct 2002 22:10:28,921 --> | ||
13 | <!-- %l  location of the event: category, thread and source line, e.g. TestLog4.main(TestLog4.java:10) --> | ||
14 | <!-- ========================================================================== --> | ||
15 | <!-- ========================== Appender reference ================================ --> | ||
16 | <!-- Log4j ships with the following appenders: --> | ||
17 | <!-- org.apache.log4j.ConsoleAppender (console), --> | ||
18 | <!-- org.apache.log4j.FileAppender (file), --> | ||
19 | <!-- org.apache.log4j.DailyRollingFileAppender (rolls to a new log file every day), --> | ||
20 | <!-- org.apache.log4j.RollingFileAppender (rolls to a new file once the current file reaches a given size), --> | ||
21 | <!-- org.apache.log4j.WriterAppender (streams log output to any specified destination) --> | ||
22 | <!-- ========================================================================== --> | ||
23 | |||
24 | |||
25 | <log4j:configuration xmlns:log4j='http://jakarta.apache.org/log4j/'> | ||
26 | <appender name="cAppender" class="org.apache.log4j.ConsoleAppender"> | ||
27 | <layout class="org.apache.log4j.PatternLayout"> | ||
28 | <param name="ConversionPattern" value="[%d{dd HH:mm:ss,SSS} %-5p] [%t] %c{1}:%m%n" /> | ||
29 | </layout> | ||
30 | <!-- Filter restricting which levels this appender emits --> | ||
31 | <filter class="org.apache.log4j.varia.LevelRangeFilter"> | ||
32 | <param name="levelMin" value="debug" /> | ||
33 | <param name="levelMax" value="error" /> | ||
34 | <param name="AcceptOnMatch" value="true" /> | ||
35 | </filter> | ||
36 | </appender> | ||
37 | |||
38 | <!-- <appender name="rfAppender" class="org.apache.log4j.RollingFileAppender"> | ||
39 | <param name="File" value="./log/sohu.port.log" /> <param name="Append" | ||
40 | value="true" /> <param name="MaxBackupIndex" value="10" /> <layout class="org.apache.log4j.PatternLayout"> | ||
41 | <param name="ConversionPattern" value="%p (%c:%L)- %m%n" /> </layout> </appender> --> | ||
42 | |||
43 | <appender name="drfInfoAppender" class="org.apache.log4j.DailyRollingFileAppender"> | ||
44 | <param name="File" value="${catalina.base}/logs/sohu.port.info" /> | ||
45 | <param name="DatePattern" value="'.'yyyy-MM-dd'.log'" /> | ||
46 | <layout class="org.apache.log4j.PatternLayout"> | ||
47 | <param name="ConversionPattern" | ||
48 | value="[%d{yyyy-MM-dd HH:mm:ss.SSS} %-5p] [%t] %c{1}:%m%n" /> | ||
49 | </layout> | ||
50 | <filter class="org.apache.log4j.varia.LevelRangeFilter"> | ||
51 | <param name="levelMin" value="info" /> | ||
52 | <param name="levelMax" value="error" /> | ||
53 | <param name="AcceptOnMatch" value="true" /> | ||
54 | </filter> | ||
55 | </appender> | ||
56 | |||
57 | <appender name="drfErrorAppender" class="org.apache.log4j.DailyRollingFileAppender"> | ||
58 | <param name="File" value="${catalina.base}/logs/sohu.port.error" /> | ||
59 | <param name="DatePattern" value="'.'yyyy-MM-dd'.log'" /> | ||
60 | <layout class="org.apache.log4j.PatternLayout"> | ||
61 | <param name="ConversionPattern" | ||
62 | value="[%d{yyyy-MM-dd HH:mm:ss.SSS} %-5p] [%t] %c{1}:%m%n" /> | ||
63 | </layout> | ||
64 | <filter class="org.apache.log4j.varia.LevelRangeFilter"> | ||
65 | <param name="levelMin" value="warning" /> | ||
66 | <param name="levelMax" value="error" /> | ||
67 | <param name="AcceptOnMatch" value="true" /> | ||
68 | </filter> | ||
69 | |||
70 | </appender> | ||
71 | |||
72 | |||
73 | |||
74 | <!-- Per-logger settings; additivity controls whether the logger also inherits the root appenders <logger name="test.TaskCommandHandler" | ||
75 | additivity="false"> <priority value ="info"/> <appender-ref ref="cAppender" | ||
76 | /> <appender-ref ref="drfAppender" /> </logger> --> | ||
77 | |||
78 | <!-- Root logger settings --> | ||
79 | <root> | ||
80 | <priority value="info" /> | ||
81 | <appender-ref ref="cAppender" /> | ||
82 | <appender-ref ref="drfInfoAppender" /> | ||
83 | <!--<appender-ref ref="drfErrorAppender"/> --> | ||
84 | |||
85 | </root> | ||
86 | </log4j:configuration> | ||
... | \ No newline at end of file
1 | org.quartz.scheduler.instanceName = Sohu Interface Scheduler | ||
2 | org.quartz.scheduler.instanceId = 01 | ||
3 | |||
4 | org.quartz.threadPool.class = org.quartz.simpl.SimpleThreadPool | ||
5 | org.quartz.threadPool.threadCount = 10 | ||
6 | |||
7 | org.quartz.jobStore.class = org.quartz.simpl.RAMJobStore | ||
8 | |||
9 | org.quartz.plugin.jobInitializer.class = org.quartz.plugins.xml.XMLSchedulingDataProcessorPlugin | ||
10 | org.quartz.plugin.jobInitializer.fileNames = quartz_jobs.xml | ||
11 | org.quartz.plugin.jobInitializer.failOnFileNotFound = true | ||
12 | |||
13 | org.quartz.scheduler.jmx.export = true | ||
... | \ No newline at end of file
target/port/WEB-INF/classes/quartz_jobs.xml
0 → 100644
1 | <?xml version='1.0' encoding='utf-8'?> | ||
2 | <job-scheduling-data | ||
3 | xmlns="http://www.quartz-scheduler.org/xml/JobSchedulingData" | ||
4 | xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" | ||
5 | xsi:schemaLocation="http://www.quartz-scheduler.org/xml/JobSchedulingData http://www.quartz-scheduler.org/xml/job_scheduling_data_2_0.xsd" | ||
6 | version="2.0"> | ||
7 | <schedule> | ||
8 | <job> | ||
9 | <name>M3U8Job</name> | ||
10 | <group>M3U8JobGroup</group> | ||
11 | <description>M3U8Job</description> | ||
12 | <job-class>com.topdraw.sohu.port.job.M3U8Job</job-class> | ||
13 | </job> | ||
14 | <trigger> | ||
15 | <simple> | ||
16 | <name>M3U8JobTrigger</name> | ||
17 | <group>M3U8JobGroup</group> | ||
18 | <job-name>M3U8Job</job-name> | ||
19 | <job-group>M3U8JobGroup</job-group> | ||
20 | <start-time-seconds-in-future>5</start-time-seconds-in-future> | ||
21 | <repeat-count>-1</repeat-count> | ||
22 | <repeat-interval>30000</repeat-interval> | ||
23 | </simple> | ||
24 | </trigger> | ||
25 | |||
26 | <job> | ||
27 | <name>MetaDataJob</name> | ||
28 | <group>MetaDataJobGroup</group> | ||
29 | <description>MetaDataJob</description> | ||
30 | <job-class>com.topdraw.sohu.port.job.MetaDataJob</job-class> | ||
31 | </job> | ||
32 | <trigger> | ||
33 | <cron> | ||
34 | <name>MetaDataJobTrigger</name> | ||
35 | <group>MetaDataJobGroup</group> | ||
36 | <job-name>MetaDataJob</job-name> | ||
37 | <job-group>MetaDataJobGroup</job-group> | ||
38 | <cron-expression>0 55 * * * ?</cron-expression> | ||
39 | </cron> | ||
40 | </trigger> | ||
41 | </schedule> | ||
42 | </job-scheduling-data> | ||
... | \ No newline at end of file
target/port/WEB-INF/lib/aopalliance-1.0.jar
0 → 100644
No preview for this file type
target/port/WEB-INF/lib/asm-5.0.3.jar
0 → 100644
No preview for this file type
target/port/WEB-INF/lib/c3p0-0.9.1.1.jar
0 → 100644
No preview for this file type
target/port/WEB-INF/lib/commons-io-2.2.jar
0 → 100644
No preview for this file type
target/port/WEB-INF/lib/commons-net-3.3.jar
0 → 100644
No preview for this file type
target/port/WEB-INF/lib/dom4j-2.0.0.jar
0 → 100644
No preview for this file type
target/port/WEB-INF/lib/druid-1.1.6.jar
0 → 100644
No preview for this file type
target/port/WEB-INF/lib/fastjson-1.2.20.jar
0 → 100644
No preview for this file type
target/port/WEB-INF/lib/gson-2.3.1.jar
0 → 100644
No preview for this file type
target/port/WEB-INF/lib/jaxen-1.1.6.jar
0 → 100644
No preview for this file type
target/port/WEB-INF/lib/jta-1.1.jar
0 → 100644
No preview for this file type
target/port/WEB-INF/lib/junit-4.12.jar
0 → 100644
No preview for this file type
target/port/WEB-INF/lib/log4j-1.2.17.jar
0 → 100644
No preview for this file type
target/port/WEB-INF/lib/quartz-2.2.3.jar
0 → 100644
No preview for this file type
target/port/WEB-INF/lib/slf4j-api-1.7.21.jar
0 → 100644
No preview for this file type
target/port/WEB-INF/lib/utility-1.0.3.jar
0 → 100644
No preview for this file type
target/port/WEB-INF/lib/xmemcached-2.0.0.jar
0 → 100644
No preview for this file type
target/port/WEB-INF/lib/xmlpull-1.1.3.1.jar
0 → 100644
No preview for this file type
target/port/WEB-INF/lib/xpp3_min-1.1.4c.jar
0 → 100644
No preview for this file type
target/port/WEB-INF/lib/xstream-1.4.7.jar
0 → 100644
No preview for this file type
target/port/WEB-INF/web.xml
0 → 100644
-