/*
 * Copyright (c) 2009, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.  Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

package sun.net.www.http;

import java.io.*;
import java.util.ArrayList;
import java.util.regex.*;
import sun.net.NetProperties;
import sun.util.logging.PlatformLogger;

/**
 * Main class of the HTTP traffic capture tool.
 * Captures are triggered by the sun.net.http.captureRules system property.
 * If set, it should point to a file containing the capture rules.
 * The format of the file is simple:
 * - one rule per line
 * - lines starting with a '#' are considered comments and ignored
 * - a rule is a pair of a regular expression and a file pattern, separated by a comma
 * - the regular expression is applied to URLs; if it matches, the traffic for
 *   that URL is captured in the associated file
 * - if the file name contains a '%d', that sequence is replaced by a
 *   unique random number for each URL. This allows for multi-threaded captures
 *   of URLs matching the same pattern.
 * - rules are checked in sequence, in the order they appear in the file, until a
 *   match is found or the end of the list is reached
 *
 * Examples of rules:
 * www\.sun\.com , sun%d.log
 * yahoo\.com\/.*asf , yahoo.log
 *
 * @author jccollet
 */
public class HttpCapture {
    private File file = null;
    private boolean incoming = true;
    private BufferedWriter out = null;
    private static boolean initialized = false;
    private static volatile ArrayList<Pattern> patterns = null;
    private static volatile ArrayList<String> capFiles = null;

    private static synchronized void init() {
        initialized = true;
        String rulesFile = java.security.AccessController.doPrivileged(
            new java.security.PrivilegedAction<String>() {
                public String run() {
                    return NetProperties.get("sun.net.http.captureRules");
                }
            });
        if (rulesFile != null && !rulesFile.isEmpty()) {
            BufferedReader in;
            try {
                in = new BufferedReader(new FileReader(rulesFile));
            } catch (FileNotFoundException ex) {
                // No rules file: capturing stays disabled
                return;
            }
            try {
                String line = in.readLine();
                while (line != null) {
                    line = line.trim();
                    if (!line.startsWith("#")) {
                        // Lines starting with '#' are comments and are skipped
                        String[] s = line.split(",");
                        if (s.length == 2) {
                            if (patterns == null) {
                                patterns = new ArrayList<Pattern>();
                                capFiles = new ArrayList<String>();
                            }
                            patterns.add(Pattern.compile(s[0].trim()));
                            capFiles.add(s[1].trim());
                        }
                    }
                    line = in.readLine();
                }
            } catch (IOException ioe) {
                // Ignore read errors; whatever rules were parsed so far are kept
            } finally {
                try {
                    in.close();
                } catch (IOException ex) {
                }
            }
        }
    }

    private static synchronized boolean isInitialized() {
        return initialized;
    }

    private HttpCapture(File f, java.net.URL url) {
        file = f;
        try {
            out = new BufferedWriter(new FileWriter(file, true));
            out.write("URL: " + url + "\n");
        } catch (IOException ex) {
            PlatformLogger.getLogger(HttpCapture.class.getName()).severe(null, ex);
        }
    }

    public synchronized void sent(int c) throws IOException {
        if (incoming) {
            out.write("\n------>\n");
            incoming = false;
            out.flush();
        }
        out.write(c);
    }

    public synchronized void received(int c) throws IOException {
        if (!incoming) {
            out.write("\n<------\n");
            incoming = true;
            out.flush();
        }
        out.write(c);
    }

    public synchronized void flush() throws IOException {
        out.flush();
    }

    public static HttpCapture getCapture(java.net.URL url) {
        if (!isInitialized()) {
            init();
        }
        if (patterns == null || patterns.isEmpty()) {
            return null;
        }
        String s = url.toString();
        for (int i = 0; i < patterns.size(); i++) {
            Pattern p = patterns.get(i);
            if (p.matcher(s).find()) {
                String f = capFiles.get(i);
                File fi;
                if (f.indexOf("%d") >= 0) {
                    // Replace '%d' with a random number until an unused file name is found
                    java.util.Random rand = new java.util.Random();
                    do {
                        String f2 = f.replace("%d", Integer.toString(rand.nextInt()));
                        fi = new File(f2);
                    } while (fi.exists());
                } else {
                    fi = new File(f);
                }
                return new HttpCapture(fi, url);
            }
        }
        return null;
    }
}
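Usage sketch (not part of the file above): the class comment describes the rules file and the sun.net.http.captureRules property, so the following minimal, hypothetical example shows how they might fit together. The demo class name, the rules file name, and the URL are illustrative, and it is assumed here that the property can be supplied as a JVM system property (normally on the command line via -Dsun.net.http.captureRules=...). Application code never calls HttpCapture directly; the internal HTTP client invokes getCapture(url) for each connection.

import java.io.FileWriter;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;

public class HttpCaptureDemo {
    public static void main(String[] args) throws Exception {
        // Hypothetical rules file: capture traffic for any example.com URL into
        // a per-connection log file ('%d' is replaced by a random number).
        try (FileWriter w = new FileWriter("capture.rules")) {
            w.write("# demo capture rules\n");
            w.write("example\\.com , example%d.log\n");
        }

        // Assumption: the property is normally passed at JVM startup, e.g.
        //   java -Dsun.net.http.captureRules=capture.rules HttpCaptureDemo
        // Setting it here only takes effect if done before the first request.
        System.setProperty("sun.net.http.captureRules", "capture.rules");

        // A plain HTTP request through the default URL handler; if the URL matches
        // a rule, the internal HTTP client writes the traffic to the capture file.
        URL url = new URL("http://example.com/");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        try (InputStream in = conn.getInputStream()) {
            byte[] buf = new byte[8192];
            while (in.read(buf) != -1) {
                // Drain the response; capturing happens as a side effect.
            }
        }
    }
}

Judging from the constructor and the sent/received methods, a capture file starts with a "URL: <url>" line and then contains the raw bytes of the exchange, with "------>" written on a line of its own when the direction switches to outgoing data and "<------" when it switches back to incoming data.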