Reimport
This commit is contained in:
		
						commit
						a0ff94dca2
					
				
							
								
								
									
										1
									
								
								.gitignore
									
									
									
									
										vendored
									
									
										Normal file
									
								
							
							
						
						
									
										1
									
								
								.gitignore
									
									
									
									
										vendored
									
									
										Normal file
									
								
							@ -0,0 +1 @@
 | 
			
		||||
/target/
 | 
			
		||||
							
								
								
									
										
											BIN
										
									
								
								img/apgreen.png
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										
											BIN
										
									
								
								img/apgreen.png
									
									
									
									
									
										Normal file
									
								
							
										
											Binary file not shown.
										
									
								
							| 
		 After Width: | Height: | Size: 2.3 KiB  | 
							
								
								
									
										
											BIN
										
									
								
								img/apgrey.png
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										
											BIN
										
									
								
								img/apgrey.png
									
									
									
									
									
										Normal file
									
								
							
										
											Binary file not shown.
										
									
								
							| 
		 After Width: | Height: | Size: 704 B  | 
							
								
								
									
										
											BIN
										
									
								
								img/apnorm.png
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										
											BIN
										
									
								
								img/apnorm.png
									
									
									
									
									
										Normal file
									
								
							
										
											Binary file not shown.
										
									
								
							| 
		 After Width: | Height: | Size: 3.3 KiB  | 
							
								
								
									
										17
									
								
								nbactions.xml
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										17
									
								
								nbactions.xml
									
									
									
									
									
										Normal file
									
								
							@ -0,0 +1,17 @@
 | 
			
		||||
<?xml version="1.0" encoding="UTF-8"?>
 | 
			
		||||
<actions>
 | 
			
		||||
        <action>
 | 
			
		||||
            <actionName>run</actionName>
 | 
			
		||||
            <packagings>
 | 
			
		||||
                <packaging>jar</packaging>
 | 
			
		||||
            </packagings>
 | 
			
		||||
            <goals>
 | 
			
		||||
                <goal>process-classes</goal>
 | 
			
		||||
                <goal>org.codehaus.mojo:exec-maven-plugin:1.5.0:exec</goal>
 | 
			
		||||
            </goals>
 | 
			
		||||
            <properties>
 | 
			
		||||
                <exec.args>-classpath %classpath live.greiner.autophono.AutoPhono</exec.args>
 | 
			
		||||
                <exec.executable>java</exec.executable>
 | 
			
		||||
            </properties>
 | 
			
		||||
        </action>
 | 
			
		||||
    </actions>
 | 
			
		||||
							
								
								
									
										57
									
								
								pom.xml
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										57
									
								
								pom.xml
									
									
									
									
									
										Normal file
									
								
							@ -0,0 +1,57 @@
 | 
			
		||||
<?xml version="1.0" encoding="UTF-8"?>
 | 
			
		||||
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
 | 
			
		||||
    <modelVersion>4.0.0</modelVersion>
 | 
			
		||||
    <groupId>live.greiner</groupId>
 | 
			
		||||
    <artifactId>AutoPhono</artifactId>
 | 
			
		||||
    <version>1.0-SNAPSHOT</version>
 | 
			
		||||
    <packaging>jar</packaging>
 | 
			
		||||
    <properties>
 | 
			
		||||
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
 | 
			
		||||
        <mainClass>live.greiner.autophono.AutoPhono</mainClass>
 | 
			
		||||
        <maven.compiler.source>1.8</maven.compiler.source>
 | 
			
		||||
        <maven.compiler.target>1.8</maven.compiler.target>
 | 
			
		||||
    </properties>
 | 
			
		||||
    <build>
 | 
			
		||||
        <plugins>
 | 
			
		||||
            <plugin>
 | 
			
		||||
                <artifactId>maven-dependency-plugin</artifactId>
 | 
			
		||||
                <executions>
 | 
			
		||||
                    <execution>
 | 
			
		||||
                        <phase>install</phase>
 | 
			
		||||
                        <goals>
 | 
			
		||||
                            <goal>copy-dependencies</goal>
 | 
			
		||||
                        </goals>
 | 
			
		||||
                        <configuration>
 | 
			
		||||
                            <outputDirectory>${project.build.directory}/lib</outputDirectory>
 | 
			
		||||
                        </configuration>
 | 
			
		||||
                    </execution>
 | 
			
		||||
                </executions>
 | 
			
		||||
            </plugin>
 | 
			
		||||
            <plugin>
 | 
			
		||||
                <artifactId>maven-jar-plugin</artifactId>
 | 
			
		||||
                <configuration>
 | 
			
		||||
                    <archive>
 | 
			
		||||
                        <manifest>
 | 
			
		||||
                            <addClasspath>true</addClasspath>
 | 
			
		||||
                            <classpathPrefix>lib/</classpathPrefix>
 | 
			
		||||
                            <mainClass>live.greiner.autophono.AutoPhono</mainClass>
 | 
			
		||||
                        </manifest>
 | 
			
		||||
                    </archive>
 | 
			
		||||
                </configuration>
 | 
			
		||||
            </plugin>
 | 
			
		||||
        </plugins>
 | 
			
		||||
    </build>  
 | 
			
		||||
    <repositories>
 | 
			
		||||
        <repository>
 | 
			
		||||
            <id>jitpack.io</id>
 | 
			
		||||
            <url>https://jitpack.io</url>
 | 
			
		||||
        </repository>
 | 
			
		||||
    </repositories>
 | 
			
		||||
    <dependencies>
 | 
			
		||||
        <dependency>
 | 
			
		||||
            <groupId>com.github.vidstige</groupId>
 | 
			
		||||
            <artifactId>jadb</artifactId>
 | 
			
		||||
            <version>v1.1.0</version>
 | 
			
		||||
        </dependency>
 | 
			
		||||
    </dependencies>
 | 
			
		||||
</project>
 | 
			
		||||
							
								
								
									
										196
									
								
								src/main/java/live/greiner/autophono/AutoPhono.java
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										196
									
								
								src/main/java/live/greiner/autophono/AutoPhono.java
									
									
									
									
									
										Normal file
									
								
							@ -0,0 +1,196 @@
 | 
			
		||||
package live.greiner.autophono;
 | 
			
		||||
 | 
			
		||||
import java.awt.AWTException;
 | 
			
		||||
import java.awt.Image;
 | 
			
		||||
import java.awt.MenuItem;
 | 
			
		||||
import java.awt.PopupMenu;
 | 
			
		||||
import java.awt.SystemTray;
 | 
			
		||||
import java.awt.Toolkit;
 | 
			
		||||
import java.awt.TrayIcon;
 | 
			
		||||
import java.awt.event.ActionEvent;
 | 
			
		||||
import java.awt.event.ActionListener;
 | 
			
		||||
import java.io.IOException;
 | 
			
		||||
import java.io.InputStream;
 | 
			
		||||
import java.io.OutputStream;
 | 
			
		||||
import java.util.List;
 | 
			
		||||
import java.util.Timer;
 | 
			
		||||
import java.util.TimerTask;
 | 
			
		||||
import java.util.logging.Level;
 | 
			
		||||
import java.util.logging.Logger;
 | 
			
		||||
import se.vidstige.jadb.JadbConnection;
 | 
			
		||||
import se.vidstige.jadb.JadbDevice;
 | 
			
		||||
import se.vidstige.jadb.JadbException;
 | 
			
		||||
 | 
			
		||||
public class AutoPhono {
 | 
			
		||||
    
 | 
			
		||||
    public Status state = Status.INITIAL;
 | 
			
		||||
    public Image images[] = {
 | 
			
		||||
        Toolkit.getDefaultToolkit().createImage("img/apgrey.png"),
 | 
			
		||||
        Toolkit.getDefaultToolkit().createImage("img/apnorm.png"),
 | 
			
		||||
        Toolkit.getDefaultToolkit().createImage("img/apgreen.png")
 | 
			
		||||
    };
 | 
			
		||||
    public PopupMenu[] popMenues = new PopupMenu[2];
 | 
			
		||||
    
 | 
			
		||||
    
 | 
			
		||||
    public AutoPhono(){
 | 
			
		||||
        System.out.println("CONSTRUCTOR START");
 | 
			
		||||
        final PopupMenu popup1 = new PopupMenu();
 | 
			
		||||
        MenuItem exitItem1 = new MenuItem("Stop");
 | 
			
		||||
        popup1.add(exitItem1);
 | 
			
		||||
        
 | 
			
		||||
        final PopupMenu popup2 = new PopupMenu();
 | 
			
		||||
        MenuItem openItem = new MenuItem("Open");
 | 
			
		||||
        MenuItem exitItem2 = new MenuItem("Stop");
 | 
			
		||||
        popup2.add(openItem);
 | 
			
		||||
        popup2.addSeparator();
 | 
			
		||||
        popup2.add(exitItem2);
 | 
			
		||||
        
 | 
			
		||||
        ActionListener alExit = (ActionEvent e) -> {
 | 
			
		||||
            SystemTray tray = SystemTray.getSystemTray();
 | 
			
		||||
            for(TrayIcon icon:tray.getTrayIcons()){
 | 
			
		||||
                tray.remove(icon);
 | 
			
		||||
            }
 | 
			
		||||
            System.exit(0);
 | 
			
		||||
        };
 | 
			
		||||
        ActionListener alOpen = (ActionEvent e) -> {
 | 
			
		||||
            startScreenMirror();
 | 
			
		||||
        };
 | 
			
		||||
        
 | 
			
		||||
        exitItem1.addActionListener(alExit);
 | 
			
		||||
        exitItem2.addActionListener(alExit);
 | 
			
		||||
        openItem.addActionListener(alOpen);
 | 
			
		||||
        
 | 
			
		||||
        popMenues[0] = popup1;
 | 
			
		||||
        popMenues[1] = popup2;
 | 
			
		||||
        System.out.println("CONSTRUCTOR END");
 | 
			
		||||
    }
 | 
			
		||||
    
 | 
			
		||||
    public static void main(String[] args) {
 | 
			
		||||
        
 | 
			
		||||
        AutoPhono ap = new AutoPhono();
 | 
			
		||||
        System.out.println("CREATED AP");
 | 
			
		||||
        ap.startAdbServer();
 | 
			
		||||
        System.out.println("STARTED SERVER");
 | 
			
		||||
        TrayIcon icon = ap.startTrayIcon();
 | 
			
		||||
        
 | 
			
		||||
        TimerTask task = new TimerTask() {
 | 
			
		||||
            @Override
 | 
			
		||||
            public void run() {
 | 
			
		||||
                System.out.println("Checking");
 | 
			
		||||
                Status pre = ap.state;
 | 
			
		||||
                ap.checkDeviceConnected();
 | 
			
		||||
                
 | 
			
		||||
                if(ap.state == Status.CONNECTED){
 | 
			
		||||
                    if(pre == Status.TRAY_ICON_STARTED || pre == Status.UNCONNECTED){
 | 
			
		||||
                        System.out.println("Device Connected");
 | 
			
		||||
                        icon.setToolTip("AutoPhoner - Connected");
 | 
			
		||||
                        icon.setImage(ap.images[1]);
 | 
			
		||||
                        icon.setPopupMenu(ap.popMenues[1]);
 | 
			
		||||
                        ap.startScreenMirror();
 | 
			
		||||
                        System.out.println("Screen Mirror started");
 | 
			
		||||
                    }
 | 
			
		||||
                }
 | 
			
		||||
                if(ap.state == Status.UNCONNECTED && pre == Status.CONNECTED){
 | 
			
		||||
                    System.out.println("Device Disconnected");
 | 
			
		||||
                    icon.setImage(ap.images[0]);
 | 
			
		||||
                    icon.setPopupMenu(ap.popMenues[0]);
 | 
			
		||||
                }
 | 
			
		||||
            }
 | 
			
		||||
        };
 | 
			
		||||
        
 | 
			
		||||
        Timer timer = new Timer("Timer");
 | 
			
		||||
        long delay = 300L;
 | 
			
		||||
        timer.scheduleAtFixedRate(task, delay,10000);
 | 
			
		||||
    }
 | 
			
		||||
    
 | 
			
		||||
    private boolean startAdbServer(){
 | 
			
		||||
        try {
 | 
			
		||||
            String command = "cmd start cmd.exe /K \"cd tools/adb && adb devices\"";
 | 
			
		||||
            Process child = Runtime.getRuntime().exec(command);
 | 
			
		||||
            OutputStream out = child.getOutputStream();
 | 
			
		||||
            InputStream in = child.getInputStream();
 | 
			
		||||
            String validResult = "List of devices attached";
 | 
			
		||||
            String input = "";
 | 
			
		||||
            long start = System.currentTimeMillis();
 | 
			
		||||
            while(start + 10000 > System.currentTimeMillis()){
 | 
			
		||||
                if(in.available()>0){
 | 
			
		||||
                    input += (char)in.read();
 | 
			
		||||
                }
 | 
			
		||||
                if(input.equals(validResult)){
 | 
			
		||||
                    state = Status.ADB_STARTED;
 | 
			
		||||
                    return true;
 | 
			
		||||
                }
 | 
			
		||||
            }
 | 
			
		||||
        } catch (IOException ex) {
 | 
			
		||||
            return false;
 | 
			
		||||
        }
 | 
			
		||||
        return false;
 | 
			
		||||
    }
 | 
			
		||||
    
 | 
			
		||||
    private boolean startScreenMirror(){
 | 
			
		||||
        try {
 | 
			
		||||
            String command2 = "cmd -c start cmd.exe /K \"cd tools/scrcpy && dir && scrcpy.exe -S\"";
 | 
			
		||||
            Process child = Runtime.getRuntime().exec(command2);
 | 
			
		||||
            OutputStream out = child.getOutputStream();
 | 
			
		||||
            InputStream in = child.getInputStream();
 | 
			
		||||
            long start = System.currentTimeMillis();
 | 
			
		||||
            while(start + 10000 > System.currentTimeMillis()){
 | 
			
		||||
                if(in.available()>0){
 | 
			
		||||
                    System.out.print((char)in.read());
 | 
			
		||||
                }
 | 
			
		||||
            }
 | 
			
		||||
        } catch (IOException ex) {
 | 
			
		||||
            System.out.println(ex);
 | 
			
		||||
            return false;
 | 
			
		||||
        }
 | 
			
		||||
        return false;
 | 
			
		||||
    }
 | 
			
		||||
    
 | 
			
		||||
    private TrayIcon startTrayIcon(){
 | 
			
		||||
        
 | 
			
		||||
        try {
 | 
			
		||||
            if (!SystemTray.isSupported()) {
 | 
			
		||||
                return null;
 | 
			
		||||
            }
 | 
			
		||||
            
 | 
			
		||||
            TrayIcon trayIcon = new TrayIcon(images[0]);
 | 
			
		||||
            SystemTray tray = SystemTray.getSystemTray();
 | 
			
		||||
            trayIcon.setImageAutoSize(true);
 | 
			
		||||
            trayIcon.setToolTip("AutoPhoner - Searching");
 | 
			
		||||
            trayIcon.setPopupMenu(popMenues[0]);
 | 
			
		||||
            
 | 
			
		||||
            
 | 
			
		||||
            tray.add(trayIcon);
 | 
			
		||||
            state = Status.TRAY_ICON_STARTED;
 | 
			
		||||
            return trayIcon;
 | 
			
		||||
        } catch (AWTException ex) {
 | 
			
		||||
            Logger.getLogger(AutoPhono.class.getName()).log(Level.SEVERE, null, ex);
 | 
			
		||||
        }
 | 
			
		||||
        return null;
 | 
			
		||||
    }
 | 
			
		||||
 | 
			
		||||
    private boolean checkDeviceConnected(){
 | 
			
		||||
        
 | 
			
		||||
        JadbConnection jadb = new JadbConnection();
 | 
			
		||||
        System.out.println("CREATED CONNECTION");    
 | 
			
		||||
        List<JadbDevice> devices;
 | 
			
		||||
        try {
 | 
			
		||||
            System.out.println("GETTING DEVICES");
 | 
			
		||||
            devices = jadb.getDevices();
 | 
			
		||||
            System.out.println("GOT DEVICES");
 | 
			
		||||
            if(devices.size() > 0){
 | 
			
		||||
                state = Status.CONNECTED;
 | 
			
		||||
                return true;
 | 
			
		||||
            }
 | 
			
		||||
            state = Status.UNCONNECTED;
 | 
			
		||||
            return false;
 | 
			
		||||
        } catch (IOException ex) {
 | 
			
		||||
            Logger.getLogger(AutoPhono.class.getName()).log(Level.SEVERE, null, ex);
 | 
			
		||||
        } catch (JadbException ex) {
 | 
			
		||||
            Logger.getLogger(AutoPhono.class.getName()).log(Level.SEVERE, null, ex);
 | 
			
		||||
            System.out.println("GOT JADB EXCEPTION");
 | 
			
		||||
        }
 | 
			
		||||
        state = Status.UNCONNECTED;
 | 
			
		||||
        return false;
 | 
			
		||||
    }
 | 
			
		||||
}
 | 
			
		||||
							
								
								
									
										14
									
								
								src/main/java/live/greiner/autophono/Status.java
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										14
									
								
								src/main/java/live/greiner/autophono/Status.java
									
									
									
									
									
										Normal file
									
								
							@ -0,0 +1,14 @@
 | 
			
		||||
/*
 | 
			
		||||
 * To change this license header, choose License Headers in Project Properties.
 | 
			
		||||
 * To change this template file, choose Tools | Templates
 | 
			
		||||
 * and open the template in the editor.
 | 
			
		||||
 */
 | 
			
		||||
package live.greiner.autophono;
 | 
			
		||||
 | 
			
		||||
/**
 | 
			
		||||
 *
 | 
			
		||||
 * @author andre
 | 
			
		||||
 */
 | 
			
		||||
public enum Status {
 | 
			
		||||
    INITIAL, ADB_STARTED, TRAY_ICON_STARTED, UNCONNECTED, CONNECTED;
 | 
			
		||||
}
 | 
			
		||||
							
								
								
									
										
											BIN
										
									
								
								src/scrcpy/AdbWinApi.dll
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										
											BIN
										
									
								
								src/scrcpy/AdbWinApi.dll
									
									
									
									
									
										Normal file
									
								
							
										
											Binary file not shown.
										
									
								
							
							
								
								
									
										
											BIN
										
									
								
								src/scrcpy/AdbWinUsbApi.dll
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										
											BIN
										
									
								
								src/scrcpy/AdbWinUsbApi.dll
									
									
									
									
									
										Normal file
									
								
							
										
											Binary file not shown.
										
									
								
							
							
								
								
									
										
											BIN
										
									
								
								src/scrcpy/SDL2.dll
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										
											BIN
										
									
								
								src/scrcpy/SDL2.dll
									
									
									
									
									
										Normal file
									
								
							
										
											Binary file not shown.
										
									
								
							
							
								
								
									
										
											BIN
										
									
								
								src/scrcpy/adb.exe
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										
											BIN
										
									
								
								src/scrcpy/adb.exe
									
									
									
									
									
										Normal file
									
								
							
										
											Binary file not shown.
										
									
								
							
							
								
								
									
										
											BIN
										
									
								
								src/scrcpy/avcodec-58.dll
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										
											BIN
										
									
								
								src/scrcpy/avcodec-58.dll
									
									
									
									
									
										Normal file
									
								
							
										
											Binary file not shown.
										
									
								
							
							
								
								
									
										
											BIN
										
									
								
								src/scrcpy/avformat-58.dll
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										
											BIN
										
									
								
								src/scrcpy/avformat-58.dll
									
									
									
									
									
										Normal file
									
								
							
										
											Binary file not shown.
										
									
								
							
							
								
								
									
										
											BIN
										
									
								
								src/scrcpy/avutil-56.dll
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										
											BIN
										
									
								
								src/scrcpy/avutil-56.dll
									
									
									
									
									
										Normal file
									
								
							
										
											Binary file not shown.
										
									
								
							
							
								
								
									
										
											BIN
										
									
								
								src/scrcpy/scrcpy-noconsole.exe
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										
											BIN
										
									
								
								src/scrcpy/scrcpy-noconsole.exe
									
									
									
									
									
										Normal file
									
								
							
										
											Binary file not shown.
										
									
								
							
							
								
								
									
										
											BIN
										
									
								
								src/scrcpy/scrcpy-server.jar
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										
											BIN
										
									
								
								src/scrcpy/scrcpy-server.jar
									
									
									
									
									
										Normal file
									
								
							
										
											Binary file not shown.
										
									
								
							
							
								
								
									
										
											BIN
										
									
								
								src/scrcpy/scrcpy.exe
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										
											BIN
										
									
								
								src/scrcpy/scrcpy.exe
									
									
									
									
									
										Normal file
									
								
							
										
											Binary file not shown.
										
									
								
							
							
								
								
									
										
											BIN
										
									
								
								src/scrcpy/swresample-3.dll
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										
											BIN
										
									
								
								src/scrcpy/swresample-3.dll
									
									
									
									
									
										Normal file
									
								
							
										
											Binary file not shown.
										
									
								
							
							
								
								
									
										
											BIN
										
									
								
								src/scrcpy/swscale-5.dll
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										
											BIN
										
									
								
								src/scrcpy/swscale-5.dll
									
									
									
									
									
										Normal file
									
								
							
										
											Binary file not shown.
										
									
								
							
							
								
								
									
										
											BIN
										
									
								
								tools/adb/AdbWinApi.dll
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										
											BIN
										
									
								
								tools/adb/AdbWinApi.dll
									
									
									
									
									
										Normal file
									
								
							
										
											Binary file not shown.
										
									
								
							
							
								
								
									
										
											BIN
										
									
								
								tools/adb/AdbWinUsbApi.dll
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										
											BIN
										
									
								
								tools/adb/AdbWinUsbApi.dll
									
									
									
									
									
										Normal file
									
								
							
										
											Binary file not shown.
										
									
								
							
							
								
								
									
										5796
									
								
								tools/adb/NOTICE.txt
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										5796
									
								
								tools/adb/NOTICE.txt
									
									
									
									
									
										Normal file
									
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							
							
								
								
									
										
											BIN
										
									
								
								tools/adb/adb.exe
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										
											BIN
										
									
								
								tools/adb/adb.exe
									
									
									
									
									
										Normal file
									
								
							
										
											Binary file not shown.
										
									
								
							
							
								
								
									
										
											BIN
										
									
								
								tools/adb/api/annotations.zip
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										
											BIN
										
									
								
								tools/adb/api/annotations.zip
									
									
									
									
									
										Normal file
									
								
							
										
											Binary file not shown.
										
									
								
							
							
								
								
									
										80604
									
								
								tools/adb/api/api-versions.xml
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										80604
									
								
								tools/adb/api/api-versions.xml
									
									
									
									
									
										Normal file
									
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							
							
								
								
									
										
											BIN
										
									
								
								tools/adb/dmtracedump.exe
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										
											BIN
										
									
								
								tools/adb/dmtracedump.exe
									
									
									
									
									
										Normal file
									
								
							
										
											Binary file not shown.
										
									
								
							
							
								
								
									
										
											BIN
										
									
								
								tools/adb/etc1tool.exe
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										
											BIN
										
									
								
								tools/adb/etc1tool.exe
									
									
									
									
									
										Normal file
									
								
							
										
											Binary file not shown.
										
									
								
							
							
								
								
									
										
											BIN
										
									
								
								tools/adb/fastboot.exe
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										
											BIN
										
									
								
								tools/adb/fastboot.exe
									
									
									
									
									
										Normal file
									
								
							
										
											Binary file not shown.
										
									
								
							
							
								
								
									
										
											BIN
										
									
								
								tools/adb/hprof-conv.exe
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										
											BIN
										
									
								
								tools/adb/hprof-conv.exe
									
									
									
									
									
										Normal file
									
								
							
										
											Binary file not shown.
										
									
								
							
							
								
								
									
										
											BIN
										
									
								
								tools/adb/lib64/libc++.so
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										
											BIN
										
									
								
								tools/adb/lib64/libc++.so
									
									
									
									
									
										Normal file
									
								
							
										
											Binary file not shown.
										
									
								
							
							
								
								
									
										
											BIN
										
									
								
								tools/adb/libwinpthread-1.dll
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										
											BIN
										
									
								
								tools/adb/libwinpthread-1.dll
									
									
									
									
									
										Normal file
									
								
							
										
											Binary file not shown.
										
									
								
							
							
								
								
									
										
											BIN
										
									
								
								tools/adb/make_f2fs.exe
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										
											BIN
										
									
								
								tools/adb/make_f2fs.exe
									
									
									
									
									
										Normal file
									
								
							
										
											Binary file not shown.
										
									
								
							
							
								
								
									
										53
									
								
								tools/adb/mke2fs.conf
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										53
									
								
								tools/adb/mke2fs.conf
									
									
									
									
									
										Normal file
									
								
							@ -0,0 +1,53 @@
 | 
			
		||||
[defaults]
 | 
			
		||||
    base_features = sparse_super,large_file,filetype,resize_inode,dir_index,ext_attr
 | 
			
		||||
    default_mntopts = acl,user_xattr
 | 
			
		||||
    enable_periodic_fsck = 0
 | 
			
		||||
    blocksize = 4096
 | 
			
		||||
    inode_size = 256
 | 
			
		||||
    inode_ratio = 16384
 | 
			
		||||
    reserved_ratio = 1.0
 | 
			
		||||
 | 
			
		||||
[fs_types]
 | 
			
		||||
    ext3 = {
 | 
			
		||||
        features = has_journal
 | 
			
		||||
    }
 | 
			
		||||
    ext4 = {
 | 
			
		||||
        features = has_journal,extent,huge_file,dir_nlink,extra_isize,uninit_bg
 | 
			
		||||
        inode_size = 256
 | 
			
		||||
    }
 | 
			
		||||
    ext4dev = {
 | 
			
		||||
        features = has_journal,extent,huge_file,flex_bg,inline_data,64bit,dir_nlink,extra_isize
 | 
			
		||||
        inode_size = 256
 | 
			
		||||
        options = test_fs=1
 | 
			
		||||
    }
 | 
			
		||||
    small = {
 | 
			
		||||
        blocksize = 1024
 | 
			
		||||
        inode_size = 128
 | 
			
		||||
        inode_ratio = 4096
 | 
			
		||||
    }
 | 
			
		||||
    floppy = {
 | 
			
		||||
        blocksize = 1024
 | 
			
		||||
        inode_size = 128
 | 
			
		||||
        inode_ratio = 8192
 | 
			
		||||
    }
 | 
			
		||||
    big = {
 | 
			
		||||
        inode_ratio = 32768
 | 
			
		||||
    }
 | 
			
		||||
    huge = {
 | 
			
		||||
        inode_ratio = 65536
 | 
			
		||||
    }
 | 
			
		||||
    news = {
 | 
			
		||||
        inode_ratio = 4096
 | 
			
		||||
    }
 | 
			
		||||
    largefile = {
 | 
			
		||||
        inode_ratio = 1048576
 | 
			
		||||
        blocksize = -1
 | 
			
		||||
    }
 | 
			
		||||
    largefile4 = {
 | 
			
		||||
        inode_ratio = 4194304
 | 
			
		||||
        blocksize = -1
 | 
			
		||||
    }
 | 
			
		||||
    hurd = {
 | 
			
		||||
         blocksize = 4096
 | 
			
		||||
         inode_size = 128
 | 
			
		||||
    }
 | 
			
		||||
							
								
								
									
										
											BIN
										
									
								
								tools/adb/mke2fs.exe
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										
											BIN
										
									
								
								tools/adb/mke2fs.exe
									
									
									
									
									
										Normal file
									
								
							
										
											Binary file not shown.
										
									
								
							
							
								
								
									
										2
									
								
								tools/adb/source.properties
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										2
									
								
								tools/adb/source.properties
									
									
									
									
									
										Normal file
									
								
							@ -0,0 +1,2 @@
 | 
			
		||||
Pkg.UserSrc=false
 | 
			
		||||
Pkg.Revision=29.0.5
 | 
			
		||||
							
								
								
									
										
											BIN
										
									
								
								tools/adb/sqlite3.exe
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										
											BIN
										
									
								
								tools/adb/sqlite3.exe
									
									
									
									
									
										Normal file
									
								
							
										
											Binary file not shown.
										
									
								
							
							
								
								
									
										205
									
								
								tools/adb/systrace/NOTICE
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										205
									
								
								tools/adb/systrace/NOTICE
									
									
									
									
									
										Normal file
									
								
							@ -0,0 +1,205 @@
 | 
			
		||||
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
 | 
			
		||||
//
 | 
			
		||||
// Redistribution and use in source and binary forms, with or without
 | 
			
		||||
// modification, are permitted provided that the following conditions are
 | 
			
		||||
// met:
 | 
			
		||||
//
 | 
			
		||||
//    * Redistributions of source code must retain the above copyright
 | 
			
		||||
// notice, this list of conditions and the following disclaimer.
 | 
			
		||||
//    * Redistributions in binary form must reproduce the above
 | 
			
		||||
// copyright notice, this list of conditions and the following disclaimer
 | 
			
		||||
// in the documentation and/or other materials provided with the
 | 
			
		||||
// distribution.
 | 
			
		||||
//    * Neither the name of Google Inc. nor the names of its
 | 
			
		||||
// contributors may be used to endorse or promote products derived from
 | 
			
		||||
// this software without specific prior written permission.
 | 
			
		||||
//
 | 
			
		||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 | 
			
		||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 | 
			
		||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 | 
			
		||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 | 
			
		||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 | 
			
		||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 | 
			
		||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 | 
			
		||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 | 
			
		||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 | 
			
		||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 | 
			
		||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
                                 Apache License
 | 
			
		||||
                           Version 2.0, January 2004
 | 
			
		||||
                        http://www.apache.org/licenses/
 | 
			
		||||
 | 
			
		||||
   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
 | 
			
		||||
 | 
			
		||||
   1. Definitions.
 | 
			
		||||
 | 
			
		||||
      "License" shall mean the terms and conditions for use, reproduction,
 | 
			
		||||
      and distribution as defined by Sections 1 through 9 of this document.
 | 
			
		||||
 | 
			
		||||
      "Licensor" shall mean the copyright owner or entity authorized by
 | 
			
		||||
      the copyright owner that is granting the License.
 | 
			
		||||
 | 
			
		||||
      "Legal Entity" shall mean the union of the acting entity and all
 | 
			
		||||
      other entities that control, are controlled by, or are under common
 | 
			
		||||
      control with that entity. For the purposes of this definition,
 | 
			
		||||
      "control" means (i) the power, direct or indirect, to cause the
 | 
			
		||||
      direction or management of such entity, whether by contract or
 | 
			
		||||
      otherwise, or (ii) ownership of fifty percent (50%) or more of the
 | 
			
		||||
      outstanding shares, or (iii) beneficial ownership of such entity.
 | 
			
		||||
 | 
			
		||||
      "You" (or "Your") shall mean an individual or Legal Entity
 | 
			
		||||
      exercising permissions granted by this License.
 | 
			
		||||
 | 
			
		||||
      "Source" form shall mean the preferred form for making modifications,
 | 
			
		||||
      including but not limited to software source code, documentation
 | 
			
		||||
      source, and configuration files.
 | 
			
		||||
 | 
			
		||||
      "Object" form shall mean any form resulting from mechanical
 | 
			
		||||
      transformation or translation of a Source form, including but
 | 
			
		||||
      not limited to compiled object code, generated documentation,
 | 
			
		||||
      and conversions to other media types.
 | 
			
		||||
 | 
			
		||||
      "Work" shall mean the work of authorship, whether in Source or
 | 
			
		||||
      Object form, made available under the License, as indicated by a
 | 
			
		||||
      copyright notice that is included in or attached to the work
 | 
			
		||||
      (an example is provided in the Appendix below).
 | 
			
		||||
 | 
			
		||||
      "Derivative Works" shall mean any work, whether in Source or Object
 | 
			
		||||
      form, that is based on (or derived from) the Work and for which the
 | 
			
		||||
      editorial revisions, annotations, elaborations, or other modifications
 | 
			
		||||
      represent, as a whole, an original work of authorship. For the purposes
 | 
			
		||||
      of this License, Derivative Works shall not include works that remain
 | 
			
		||||
      separable from, or merely link (or bind by name) to the interfaces of,
 | 
			
		||||
      the Work and Derivative Works thereof.
 | 
			
		||||
 | 
			
		||||
      "Contribution" shall mean any work of authorship, including
 | 
			
		||||
      the original version of the Work and any modifications or additions
 | 
			
		||||
      to that Work or Derivative Works thereof, that is intentionally
 | 
			
		||||
      submitted to Licensor for inclusion in the Work by the copyright owner
 | 
			
		||||
      or by an individual or Legal Entity authorized to submit on behalf of
 | 
			
		||||
      the copyright owner. For the purposes of this definition, "submitted"
 | 
			
		||||
      means any form of electronic, verbal, or written communication sent
 | 
			
		||||
      to the Licensor or its representatives, including but not limited to
 | 
			
		||||
      communication on electronic mailing lists, source code control systems,
 | 
			
		||||
      and issue tracking systems that are managed by, or on behalf of, the
 | 
			
		||||
      Licensor for the purpose of discussing and improving the Work, but
 | 
			
		||||
      excluding communication that is conspicuously marked or otherwise
 | 
			
		||||
      designated in writing by the copyright owner as "Not a Contribution."
 | 
			
		||||
 | 
			
		||||
      "Contributor" shall mean Licensor and any individual or Legal Entity
 | 
			
		||||
      on behalf of whom a Contribution has been received by Licensor and
 | 
			
		||||
      subsequently incorporated within the Work.
 | 
			
		||||
 | 
			
		||||
   2. Grant of Copyright License. Subject to the terms and conditions of
 | 
			
		||||
      this License, each Contributor hereby grants to You a perpetual,
 | 
			
		||||
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
 | 
			
		||||
      copyright license to reproduce, prepare Derivative Works of,
 | 
			
		||||
      publicly display, publicly perform, sublicense, and distribute the
 | 
			
		||||
      Work and such Derivative Works in Source or Object form.
 | 
			
		||||
 | 
			
		||||
   3. Grant of Patent License. Subject to the terms and conditions of
 | 
			
		||||
      this License, each Contributor hereby grants to You a perpetual,
 | 
			
		||||
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
 | 
			
		||||
      (except as stated in this section) patent license to make, have made,
 | 
			
		||||
      use, offer to sell, sell, import, and otherwise transfer the Work,
 | 
			
		||||
      where such license applies only to those patent claims licensable
 | 
			
		||||
      by such Contributor that are necessarily infringed by their
 | 
			
		||||
      Contribution(s) alone or by combination of their Contribution(s)
 | 
			
		||||
      with the Work to which such Contribution(s) was submitted. If You
 | 
			
		||||
      institute patent litigation against any entity (including a
 | 
			
		||||
      cross-claim or counterclaim in a lawsuit) alleging that the Work
 | 
			
		||||
      or a Contribution incorporated within the Work constitutes direct
 | 
			
		||||
      or contributory patent infringement, then any patent licenses
 | 
			
		||||
      granted to You under this License for that Work shall terminate
 | 
			
		||||
      as of the date such litigation is filed.
 | 
			
		||||
 | 
			
		||||
   4. Redistribution. You may reproduce and distribute copies of the
 | 
			
		||||
      Work or Derivative Works thereof in any medium, with or without
 | 
			
		||||
      modifications, and in Source or Object form, provided that You
 | 
			
		||||
      meet the following conditions:
 | 
			
		||||
 | 
			
		||||
      (a) You must give any other recipients of the Work or
 | 
			
		||||
          Derivative Works a copy of this License; and
 | 
			
		||||
 | 
			
		||||
      (b) You must cause any modified files to carry prominent notices
 | 
			
		||||
          stating that You changed the files; and
 | 
			
		||||
 | 
			
		||||
      (c) You must retain, in the Source form of any Derivative Works
 | 
			
		||||
          that You distribute, all copyright, patent, trademark, and
 | 
			
		||||
          attribution notices from the Source form of the Work,
 | 
			
		||||
          excluding those notices that do not pertain to any part of
 | 
			
		||||
          the Derivative Works; and
 | 
			
		||||
 | 
			
		||||
      (d) If the Work includes a "NOTICE" text file as part of its
 | 
			
		||||
          distribution, then any Derivative Works that You distribute must
 | 
			
		||||
          include a readable copy of the attribution notices contained
 | 
			
		||||
          within such NOTICE file, excluding those notices that do not
 | 
			
		||||
          pertain to any part of the Derivative Works, in at least one
 | 
			
		||||
          of the following places: within a NOTICE text file distributed
 | 
			
		||||
          as part of the Derivative Works; within the Source form or
 | 
			
		||||
          documentation, if provided along with the Derivative Works; or,
 | 
			
		||||
          within a display generated by the Derivative Works, if and
 | 
			
		||||
          wherever such third-party notices normally appear. The contents
 | 
			
		||||
          of the NOTICE file are for informational purposes only and
 | 
			
		||||
          do not modify the License. You may add Your own attribution
 | 
			
		||||
          notices within Derivative Works that You distribute, alongside
 | 
			
		||||
          or as an addendum to the NOTICE text from the Work, provided
 | 
			
		||||
          that such additional attribution notices cannot be construed
 | 
			
		||||
          as modifying the License.
 | 
			
		||||
 | 
			
		||||
      You may add Your own copyright statement to Your modifications and
 | 
			
		||||
      may provide additional or different license terms and conditions
 | 
			
		||||
      for use, reproduction, or distribution of Your modifications, or
 | 
			
		||||
      for any such Derivative Works as a whole, provided Your use,
 | 
			
		||||
      reproduction, and distribution of the Work otherwise complies with
 | 
			
		||||
      the conditions stated in this License.
 | 
			
		||||
 | 
			
		||||
   5. Submission of Contributions. Unless You explicitly state otherwise,
 | 
			
		||||
      any Contribution intentionally submitted for inclusion in the Work
 | 
			
		||||
      by You to the Licensor shall be under the terms and conditions of
 | 
			
		||||
      this License, without any additional terms or conditions.
 | 
			
		||||
      Notwithstanding the above, nothing herein shall supersede or modify
 | 
			
		||||
      the terms of any separate license agreement you may have executed
 | 
			
		||||
      with Licensor regarding such Contributions.
 | 
			
		||||
 | 
			
		||||
   6. Trademarks. This License does not grant permission to use the trade
 | 
			
		||||
      names, trademarks, service marks, or product names of the Licensor,
 | 
			
		||||
      except as required for reasonable and customary use in describing the
 | 
			
		||||
      origin of the Work and reproducing the content of the NOTICE file.
 | 
			
		||||
 | 
			
		||||
   7. Disclaimer of Warranty. Unless required by applicable law or
 | 
			
		||||
      agreed to in writing, Licensor provides the Work (and each
 | 
			
		||||
      Contributor provides its Contributions) on an "AS IS" BASIS,
 | 
			
		||||
      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 | 
			
		||||
      implied, including, without limitation, any warranties or conditions
 | 
			
		||||
      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
 | 
			
		||||
      PARTICULAR PURPOSE. You are solely responsible for determining the
 | 
			
		||||
      appropriateness of using or redistributing the Work and assume any
 | 
			
		||||
      risks associated with Your exercise of permissions under this License.
 | 
			
		||||
 | 
			
		||||
   8. Limitation of Liability. In no event and under no legal theory,
 | 
			
		||||
      whether in tort (including negligence), contract, or otherwise,
 | 
			
		||||
      unless required by applicable law (such as deliberate and grossly
 | 
			
		||||
      negligent acts) or agreed to in writing, shall any Contributor be
 | 
			
		||||
      liable to You for damages, including any direct, indirect, special,
 | 
			
		||||
      incidental, or consequential damages of any character arising as a
 | 
			
		||||
      result of this License or out of the use or inability to use the
 | 
			
		||||
      Work (including but not limited to damages for loss of goodwill,
 | 
			
		||||
      work stoppage, computer failure or malfunction, or any and all
 | 
			
		||||
      other commercial damages or losses), even if such Contributor
 | 
			
		||||
      has been advised of the possibility of such damages.
 | 
			
		||||
 | 
			
		||||
   9. Accepting Warranty or Additional Liability. While redistributing
 | 
			
		||||
      the Work or Derivative Works thereof, You may choose to offer,
 | 
			
		||||
      and charge a fee for, acceptance of support, warranty, indemnity,
 | 
			
		||||
      or other liability obligations and/or rights consistent with this
 | 
			
		||||
      License. However, in accepting such obligations, You may act only
 | 
			
		||||
      on Your own behalf and on Your sole responsibility, not on behalf
 | 
			
		||||
      of any other Contributor, and only if You agree to indemnify,
 | 
			
		||||
      defend, and hold each Contributor harmless for any liability
 | 
			
		||||
      incurred by, or claims asserted against, such Contributor by reason
 | 
			
		||||
      of your accepting any such warranty or additional liability.
 | 
			
		||||
 | 
			
		||||
   END OF TERMS AND CONDITIONS
 | 
			
		||||
							
								
								
									
										1
									
								
								tools/adb/systrace/UPSTREAM_REVISION
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										1
									
								
								tools/adb/systrace/UPSTREAM_REVISION
									
									
									
									
									
										Normal file
									
								
							@ -0,0 +1 @@
 | 
			
		||||
cad35e22dcad126c6a20663ded101565e6326d82
 | 
			
		||||
							
								
								
									
										28
									
								
								tools/adb/systrace/catapult/common/bin/run_tests
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										28
									
								
								tools/adb/systrace/catapult/common/bin/run_tests
									
									
									
									
									
										Normal file
									
								
							@ -0,0 +1,28 @@
 | 
			
		||||
#!/usr/bin/env python
 | 
			
		||||
# Copyright (c) 2015 The Chromium Authors. All rights reserved.
 | 
			
		||||
# Use of this source code is governed by a BSD-style license that can be
 | 
			
		||||
# found in the LICENSE file.
 | 
			
		||||
 | 
			
		||||
import os
 | 
			
		||||
import sys
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
_CATAPULT_PATH = os.path.abspath(
 | 
			
		||||
    os.path.join(os.path.dirname(__file__), '..', '..'))
 | 
			
		||||
_TESTS = [
 | 
			
		||||
    {'path': os.path.join(
 | 
			
		||||
        _CATAPULT_PATH, 'common', 'eslint', 'bin', 'run_tests')},
 | 
			
		||||
    {'path': os.path.join(
 | 
			
		||||
        _CATAPULT_PATH, 'common', 'py_trace_event', 'bin', 'run_tests')},
 | 
			
		||||
    {'path': os.path.join(
 | 
			
		||||
        _CATAPULT_PATH, 'common', 'py_utils', 'bin', 'run_tests')},
 | 
			
		||||
    {'path': os.path.join(
 | 
			
		||||
        _CATAPULT_PATH, 'common', 'py_vulcanize', 'bin', 'run_py_tests')},
 | 
			
		||||
]
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
if __name__ == '__main__':
 | 
			
		||||
  sys.path.append(_CATAPULT_PATH)
 | 
			
		||||
  from catapult_build import test_runner
 | 
			
		||||
  sys.exit(test_runner.Main('project', _TESTS, sys.argv))
 | 
			
		||||
 | 
			
		||||
@ -0,0 +1,229 @@
 | 
			
		||||
#!/usr/bin/env python
 | 
			
		||||
#
 | 
			
		||||
# Copyright 2013 The Chromium Authors. All rights reserved.
 | 
			
		||||
# Use of this source code is governed by a BSD-style license that can be
 | 
			
		||||
# found in the LICENSE file.
 | 
			
		||||
 | 
			
		||||
"""Updates the Chrome reference builds.
 | 
			
		||||
 | 
			
		||||
Usage:
 | 
			
		||||
  $ /path/to/update_reference_build.py
 | 
			
		||||
  $ git commit -a
 | 
			
		||||
  $ git cl upload
 | 
			
		||||
"""
 | 
			
		||||
 | 
			
		||||
import collections
 | 
			
		||||
import logging
 | 
			
		||||
import os
 | 
			
		||||
import shutil
 | 
			
		||||
import subprocess
 | 
			
		||||
import sys
 | 
			
		||||
import tempfile
 | 
			
		||||
import urllib2
 | 
			
		||||
import zipfile
 | 
			
		||||
 | 
			
		||||
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'py_utils'))
 | 
			
		||||
 | 
			
		||||
from py_utils import cloud_storage
 | 
			
		||||
from dependency_manager import base_config
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def BuildNotFoundError(error_string):
 | 
			
		||||
  raise ValueError(error_string)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
_CHROME_BINARIES_CONFIG = os.path.join(
 | 
			
		||||
    os.path.dirname(os.path.abspath(__file__)), '..', '..', 'common',
 | 
			
		||||
    'py_utils', 'py_utils', 'chrome_binaries.json')
 | 
			
		||||
 | 
			
		||||
CHROME_GS_BUCKET = 'chrome-unsigned'
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Remove a platform name from this list to disable updating it.
 | 
			
		||||
# Add one to enable updating it. (Must also update _PLATFORM_MAP.)
 | 
			
		||||
_PLATFORMS_TO_UPDATE = ['mac_x86_64', 'win_x86', 'win_AMD64', 'linux_x86_64',
 | 
			
		||||
                        'android_k_armeabi-v7a', 'android_l_arm64-v8a',
 | 
			
		||||
                        'android_l_armeabi-v7a', 'android_n_armeabi-v7a',
 | 
			
		||||
                        'android_n_arm64-v8a']
 | 
			
		||||
 | 
			
		||||
# Remove a channel name from this list to disable updating it.
 | 
			
		||||
# Add one to enable updating it.
 | 
			
		||||
_CHANNELS_TO_UPDATE = ['stable', 'canary', 'dev']
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Omaha is Chrome's autoupdate server. It reports the current versions used
 | 
			
		||||
# by each platform on each channel.
 | 
			
		||||
_OMAHA_PLATFORMS = { 'stable':  ['mac', 'linux', 'win', 'android'],
 | 
			
		||||
                    'dev':  ['linux'], 'canary': ['mac', 'win']}
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# All of the information we need to update each platform.
 | 
			
		||||
#   omaha: name omaha uses for the platforms.
 | 
			
		||||
#   zip_name: name of the zip file to be retrieved from cloud storage.
 | 
			
		||||
#   gs_build: name of the Chrome build platform used in cloud storage.
 | 
			
		||||
#   destination: Name of the folder to download the reference build to.
 | 
			
		||||
UpdateInfo = collections.namedtuple('UpdateInfo',
 | 
			
		||||
    'omaha, gs_folder, gs_build, zip_name')
 | 
			
		||||
_PLATFORM_MAP = {'mac_x86_64': UpdateInfo(omaha='mac',
 | 
			
		||||
                                          gs_folder='desktop-*',
 | 
			
		||||
                                          gs_build='mac64',
 | 
			
		||||
                                          zip_name='chrome-mac.zip'),
 | 
			
		||||
                 'win_x86': UpdateInfo(omaha='win',
 | 
			
		||||
                                       gs_folder='desktop-*',
 | 
			
		||||
                                       gs_build='win-clang',
 | 
			
		||||
                                       zip_name='chrome-win-clang.zip'),
 | 
			
		||||
                 'win_AMD64': UpdateInfo(omaha='win',
 | 
			
		||||
                                         gs_folder='desktop-*',
 | 
			
		||||
                                         gs_build='win64-clang',
 | 
			
		||||
                                         zip_name='chrome-win64-clang.zip'),
 | 
			
		||||
                 'linux_x86_64': UpdateInfo(omaha='linux',
 | 
			
		||||
                                            gs_folder='desktop-*',
 | 
			
		||||
                                            gs_build='linux64',
 | 
			
		||||
                                            zip_name='chrome-linux64.zip'),
 | 
			
		||||
                 'android_k_armeabi-v7a': UpdateInfo(omaha='android',
 | 
			
		||||
                                                     gs_folder='android-*',
 | 
			
		||||
                                                     gs_build='arm',
 | 
			
		||||
                                                     zip_name='Chrome.apk'),
 | 
			
		||||
                 'android_l_arm64-v8a': UpdateInfo(omaha='android',
 | 
			
		||||
                                                   gs_folder='android-*',
 | 
			
		||||
                                                   gs_build='arm_64',
 | 
			
		||||
                                                   zip_name='ChromeModern.apk'),
 | 
			
		||||
                 'android_l_armeabi-v7a': UpdateInfo(omaha='android',
 | 
			
		||||
                                                     gs_folder='android-*',
 | 
			
		||||
                                                     gs_build='arm',
 | 
			
		||||
                                                     zip_name='Chrome.apk'),
 | 
			
		||||
                 'android_n_armeabi-v7a': UpdateInfo(omaha='android',
 | 
			
		||||
                                                     gs_folder='android-*',
 | 
			
		||||
                                                     gs_build='arm',
 | 
			
		||||
                                                     zip_name='Monochrome.apk'),
 | 
			
		||||
                 'android_n_arm64-v8a': UpdateInfo(omaha='android',
 | 
			
		||||
                                                   gs_folder='android-*',
 | 
			
		||||
                                                   gs_build='arm_64',
 | 
			
		||||
                                                   zip_name='Monochrome.apk'),
 | 
			
		||||
 | 
			
		||||
}
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _ChannelVersionsMap(channel):
 | 
			
		||||
  rows = _OmahaReportVersionInfo(channel)
 | 
			
		||||
  omaha_versions_map = _OmahaVersionsMap(rows, channel)
 | 
			
		||||
  channel_versions_map = {}
 | 
			
		||||
  for platform in _PLATFORMS_TO_UPDATE:
 | 
			
		||||
    omaha_platform = _PLATFORM_MAP[platform].omaha
 | 
			
		||||
    if omaha_platform in omaha_versions_map:
 | 
			
		||||
      channel_versions_map[platform] = omaha_versions_map[omaha_platform]
 | 
			
		||||
  return channel_versions_map
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _OmahaReportVersionInfo(channel):
 | 
			
		||||
  url ='https://omahaproxy.appspot.com/all?channel=%s' % channel
 | 
			
		||||
  lines = urllib2.urlopen(url).readlines()
 | 
			
		||||
  return [l.split(',') for l in lines]
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _OmahaVersionsMap(rows, channel):
 | 
			
		||||
  platforms = _OMAHA_PLATFORMS.get(channel, [])
 | 
			
		||||
  if (len(rows) < 1 or
 | 
			
		||||
      not rows[0][0:3] == ['os', 'channel', 'current_version']):
 | 
			
		||||
    raise ValueError(
 | 
			
		||||
        'Omaha report is not in the expected form: %s.' % rows)
 | 
			
		||||
  versions_map = {}
 | 
			
		||||
  for row in rows[1:]:
 | 
			
		||||
    if row[1] != channel:
 | 
			
		||||
      raise ValueError(
 | 
			
		||||
          'Omaha report contains a line with the channel %s' % row[1])
 | 
			
		||||
    if row[0] in platforms:
 | 
			
		||||
      versions_map[row[0]] = row[2]
 | 
			
		||||
  logging.warn('versions map: %s' % versions_map)
 | 
			
		||||
  if not all(platform in versions_map for platform in platforms):
 | 
			
		||||
    raise ValueError(
 | 
			
		||||
        'Omaha report did not contain all desired platforms for channel %s' % channel)
 | 
			
		||||
  return versions_map
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _QueuePlatformUpdate(platform, version, config, channel):
 | 
			
		||||
  """ platform: the name of the platform for the browser to
 | 
			
		||||
      be downloaded & updated from cloud storage. """
 | 
			
		||||
  platform_info = _PLATFORM_MAP[platform]
 | 
			
		||||
  filename = platform_info.zip_name
 | 
			
		||||
  # remote_path example: desktop-*/30.0.1595.0/precise32/chrome-precise32.zip
 | 
			
		||||
  remote_path = '%s/%s/%s/%s' % (
 | 
			
		||||
      platform_info.gs_folder, version, platform_info.gs_build, filename)
 | 
			
		||||
  if not cloud_storage.Exists(CHROME_GS_BUCKET, remote_path):
 | 
			
		||||
    cloud_storage_path = 'gs://%s/%s' % (CHROME_GS_BUCKET, remote_path)
 | 
			
		||||
    raise BuildNotFoundError(
 | 
			
		||||
        'Failed to find %s build for version %s at path %s.' % (
 | 
			
		||||
            platform, version, cloud_storage_path))
 | 
			
		||||
  reference_builds_folder = os.path.join(
 | 
			
		||||
      os.path.dirname(os.path.abspath(__file__)), 'chrome_telemetry_build',
 | 
			
		||||
      'reference_builds', channel)
 | 
			
		||||
  if not os.path.exists(reference_builds_folder):
 | 
			
		||||
    os.makedirs(reference_builds_folder)
 | 
			
		||||
  local_dest_path = os.path.join(reference_builds_folder, filename)
 | 
			
		||||
  cloud_storage.Get(CHROME_GS_BUCKET, remote_path, local_dest_path)
 | 
			
		||||
  _ModifyBuildIfNeeded(local_dest_path, platform)
 | 
			
		||||
  config.AddCloudStorageDependencyUpdateJob(
 | 
			
		||||
      'chrome_%s' % channel, platform, local_dest_path, version=version,
 | 
			
		||||
      execute_job=False)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _ModifyBuildIfNeeded(location, platform):
 | 
			
		||||
  """Hook to modify the build before saving it for Telemetry to use.
 | 
			
		||||
 | 
			
		||||
  This can be used to remove various utilities that cause noise in a
 | 
			
		||||
  test environment. Right now, it is just used to remove Keystone,
 | 
			
		||||
  which is a tool used to autoupdate Chrome.
 | 
			
		||||
  """
 | 
			
		||||
  if platform == 'mac_x86_64':
 | 
			
		||||
    _RemoveKeystoneFromBuild(location)
 | 
			
		||||
    return
 | 
			
		||||
 | 
			
		||||
  if 'mac' in platform:
 | 
			
		||||
    raise NotImplementedError(
 | 
			
		||||
        'Platform <%s> sounds like it is an OSX version. If so, we may need to '
 | 
			
		||||
        'remove Keystone from it per crbug.com/932615. Please edit this script'
 | 
			
		||||
        ' and teach it what needs to be done :).')
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _RemoveKeystoneFromBuild(location):
 | 
			
		||||
  """Removes the Keystone autoupdate binary from the chrome mac zipfile."""
 | 
			
		||||
  logging.info('Removing keystone from mac build at %s' % location)
 | 
			
		||||
  temp_folder = tempfile.mkdtemp(prefix='RemoveKeystoneFromBuild')
 | 
			
		||||
  try:
 | 
			
		||||
    subprocess.check_call(['unzip', '-q', location, '-d', temp_folder])
 | 
			
		||||
    keystone_folder = os.path.join(
 | 
			
		||||
        temp_folder, 'chrome-mac', 'Google Chrome.app', 'Contents',
 | 
			
		||||
        'Frameworks', 'Google Chrome Framework.framework', 'Frameworks',
 | 
			
		||||
        'KeystoneRegistration.framework')
 | 
			
		||||
    shutil.rmtree(keystone_folder)
 | 
			
		||||
    os.remove(location)
 | 
			
		||||
    subprocess.check_call(['zip', '--quiet', '--recurse-paths', '--symlinks',
 | 
			
		||||
                           location, 'chrome-mac'],
 | 
			
		||||
                           cwd=temp_folder)
 | 
			
		||||
  finally:
 | 
			
		||||
    shutil.rmtree(temp_folder)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def UpdateBuilds():
 | 
			
		||||
  config = base_config.BaseConfig(_CHROME_BINARIES_CONFIG, writable=True)
 | 
			
		||||
  for channel in _CHANNELS_TO_UPDATE:
 | 
			
		||||
    channel_versions_map = _ChannelVersionsMap(channel)
 | 
			
		||||
    for platform in channel_versions_map:
 | 
			
		||||
      print 'Downloading Chrome (%s channel) on %s' % (channel, platform)
 | 
			
		||||
      current_version = config.GetVersion('chrome_%s' % channel, platform)
 | 
			
		||||
      channel_version =  channel_versions_map.get(platform)
 | 
			
		||||
      print 'current: %s, channel: %s' % (current_version, channel_version)
 | 
			
		||||
      if current_version and current_version == channel_version:
 | 
			
		||||
        continue
 | 
			
		||||
      _QueuePlatformUpdate(platform, channel_version, config, channel)
 | 
			
		||||
 | 
			
		||||
  print 'Updating chrome builds with downloaded binaries'
 | 
			
		||||
  config.ExecuteUpdateJobs(force=True)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def main():
 | 
			
		||||
  logging.getLogger().setLevel(logging.DEBUG)
 | 
			
		||||
  UpdateBuilds()
 | 
			
		||||
 | 
			
		||||
if __name__ == '__main__':
 | 
			
		||||
  main()
 | 
			
		||||
							
								
								
									
										20
									
								
								tools/adb/systrace/catapult/common/eslint/LICENSE
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										20
									
								
								tools/adb/systrace/catapult/common/eslint/LICENSE
									
									
									
									
									
										Normal file
									
								
							@ -0,0 +1,20 @@
 | 
			
		||||
ESLint
 | 
			
		||||
Copyright jQuery Foundation and other contributors, https://jquery.org/
 | 
			
		||||
 | 
			
		||||
Permission is hereby granted, free of charge, to any person obtaining a copy
 | 
			
		||||
of this software and associated documentation files (the "Software"), to deal
 | 
			
		||||
in the Software without restriction, including without limitation the rights
 | 
			
		||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 | 
			
		||||
copies of the Software, and to permit persons to whom the Software is
 | 
			
		||||
furnished to do so, subject to the following conditions:
 | 
			
		||||
 | 
			
		||||
The above copyright notice and this permission notice shall be included in
 | 
			
		||||
all copies or substantial portions of the Software.
 | 
			
		||||
 | 
			
		||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 | 
			
		||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 | 
			
		||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 | 
			
		||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 | 
			
		||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 | 
			
		||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 | 
			
		||||
THE SOFTWARE.
 | 
			
		||||
							
								
								
									
										5
									
								
								tools/adb/systrace/catapult/common/eslint/README.md
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										5
									
								
								tools/adb/systrace/catapult/common/eslint/README.md
									
									
									
									
									
										Normal file
									
								
							@ -0,0 +1,5 @@
 | 
			
		||||
This directory contains the Catapult eslint config, custom Catapult eslint rules,
 | 
			
		||||
and tests for those rules.
 | 
			
		||||
 | 
			
		||||
Some of our custom rules are modified versions of those included with eslint, as
 | 
			
		||||
suggested in https://goo.gl/uAxFHq.
 | 
			
		||||
							
								
								
									
										54
									
								
								tools/adb/systrace/catapult/common/eslint/bin/run_eslint
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										54
									
								
								tools/adb/systrace/catapult/common/eslint/bin/run_eslint
									
									
									
									
									
										Normal file
									
								
							@ -0,0 +1,54 @@
 | 
			
		||||
#!/usr/bin/env python
 | 
			
		||||
# Copyright 2016 The Chromium Authors. All rights reserved.
 | 
			
		||||
# Use of this source code is governed by a BSD-style license that can be
 | 
			
		||||
# found in the LICENSE file.
 | 
			
		||||
 | 
			
		||||
import argparse
 | 
			
		||||
import os
 | 
			
		||||
import sys
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Directory holding this script; all other paths are derived from it.
_THIS_DIR = os.path.dirname(__file__)

# Root of the catapult checkout, three levels above this bin/ directory.
_CATAPULT_PATH = os.path.abspath(
    os.path.join(_THIS_DIR, os.path.pardir, os.path.pardir, os.path.pardir))

# The eslint package directory (parent of bin/).
_ESLINT_PATH = os.path.abspath(os.path.join(_THIS_DIR, os.path.pardir))

# Catapult directories that get linted when --all is passed.
DIRECTORIES_TO_LINT = [
    os.path.join(_CATAPULT_PATH, 'dashboard', 'dashboard'),
    os.path.join(_CATAPULT_PATH, 'tracing', 'tracing'),
]
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _AddToPathIfNeeded(path):
  """Prepends |path| to sys.path unless it is already present."""
  if path in sys.path:
    return
  sys.path.insert(0, path)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
if __name__ == '__main__':
 | 
			
		||||
  _AddToPathIfNeeded(_ESLINT_PATH)
 | 
			
		||||
  import eslint
 | 
			
		||||
 | 
			
		||||
  parser = argparse.ArgumentParser(
 | 
			
		||||
      description='Wrapper script to run eslint on Catapult code')
 | 
			
		||||
  parser.add_argument('--paths', '-p', default=None, nargs='+', metavar='PATH',
 | 
			
		||||
                      help='List of paths to lint')
 | 
			
		||||
  parser.add_argument('--all', default=None, action='store_true',
 | 
			
		||||
                      help='Runs eslint on all applicable Catapult code')
 | 
			
		||||
  parser.add_argument('--extra-args', default=None, type=str,
 | 
			
		||||
                      help='A string of extra arguments to pass to eslint')
 | 
			
		||||
 | 
			
		||||
  args = parser.parse_args(sys.argv[1:])
 | 
			
		||||
  if ((args.paths is not None and args.all is not None) or
 | 
			
		||||
      (args.paths is None and args.all is None)):
 | 
			
		||||
    print 'Either --paths or --all must be used, but not both.\n'
 | 
			
		||||
    parser.print_help()
 | 
			
		||||
    sys.exit(1)
 | 
			
		||||
 | 
			
		||||
  paths = DIRECTORIES_TO_LINT if args.all else args.paths
 | 
			
		||||
  success, output = eslint.RunEslint(paths, extra_args=args.extra_args)
 | 
			
		||||
  print output
 | 
			
		||||
  sys.exit(not success)
 | 
			
		||||
							
								
								
									
										35
									
								
								tools/adb/systrace/catapult/common/eslint/bin/run_tests
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										35
									
								
								tools/adb/systrace/catapult/common/eslint/bin/run_tests
									
									
									
									
									
										Normal file
									
								
							@ -0,0 +1,35 @@
 | 
			
		||||
#!/usr/bin/env python
 | 
			
		||||
# Copyright 2016 The Chromium Authors. All rights reserved.
 | 
			
		||||
# Use of this source code is governed by a BSD-style license that can be
 | 
			
		||||
# found in the LICENSE file.
 | 
			
		||||
 | 
			
		||||
import os
 | 
			
		||||
import sys
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Directory holding this script.
_THIS_DIR = os.path.dirname(__file__)

# Root of the catapult checkout, three levels above this bin/ directory.
_CATAPULT_PATH = os.path.abspath(
    os.path.join(_THIS_DIR, os.path.pardir, os.path.pardir, os.path.pardir))

# The eslint package directory (parent of bin/).
_ESLINT_PATH = os.path.abspath(os.path.join(_THIS_DIR, os.path.pardir))
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _RunTestsOrDie(top_level_dir):
  """Runs all tests under |top_level_dir| via typ; exits on any failure.

  Relies on run_with_typ being imported at module scope (done inside the
  __main__ block below) before this is called.
  """
  failures = run_with_typ.Run(top_level_dir, path=[_ESLINT_PATH])
  if failures:
    sys.exit(failures)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _AddToPathIfNeeded(path):
  """Puts |path| at the front of sys.path if it is not already importable."""
  if path in sys.path:
    return
  sys.path.insert(0, path)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
if __name__ == '__main__':
  # catapult_build lives directly under the checkout root; it must be on
  # sys.path before the import below can succeed.
  _AddToPathIfNeeded(_CATAPULT_PATH)

  from catapult_build import run_with_typ

  # Discover and run all tests under the eslint package directory; the
  # process exits with typ's status code if anything fails.
  _RunTestsOrDie(os.path.join(_ESLINT_PATH, 'eslint'))
 | 
			
		||||
							
								
								
									
										68
									
								
								tools/adb/systrace/catapult/common/eslint/eslint/__init__.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										68
									
								
								tools/adb/systrace/catapult/common/eslint/eslint/__init__.py
									
									
									
									
									
										Normal file
									
								
							@ -0,0 +1,68 @@
 | 
			
		||||
# Copyright 2016 The Chromium Authors. All rights reserved.
 | 
			
		||||
# Use of this source code is governed by a BSD-style license that can be
 | 
			
		||||
# found in the LICENSE file.
 | 
			
		||||
 | 
			
		||||
import os
 | 
			
		||||
import subprocess
 | 
			
		||||
import sys
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Root of the catapult checkout, three levels above this package.
# abspath() normalizes away the '..' segments so consumers see a canonical
# path, matching how bin/run_eslint and bin/run_tests compute the same root.
_CATAPULT_PATH = os.path.abspath(
    os.path.join(os.path.dirname(os.path.abspath(__file__)),
                 os.path.pardir, os.path.pardir, os.path.pardir))
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _AddToPathIfNeeded(path):
  """Inserts |path| at the head of sys.path when it is not already there."""
  if path in sys.path:
    return
  sys.path.insert(0, path)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _UpdateSysPathIfNeeded():
  """Makes the node_runner and py_utils packages importable."""
  for package in ('node_runner', 'py_utils'):
    _AddToPathIfNeeded(os.path.join(_CATAPULT_PATH, 'common', package))
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
_UpdateSysPathIfNeeded()
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
import py_utils
 | 
			
		||||
from node_runner import node_util
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Base command line for running the bundled eslint through node;
# --color keeps colored output even when stdout is not a terminal.
BASE_ESLINT_CMD = [
    node_util.GetNodePath(),
    os.path.join(node_util.GetNodeModulesPath(), 'eslint', 'bin', 'eslint.js'),
    '--color',
]

# The custom Catapult rules shipped in common/eslint/rules.
DEFAULT_ESLINT_RULES_DIR = os.path.join(
    py_utils.GetCatapultDir(), 'common', 'eslint', 'rules')
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _CreateEslintCommand(rulesdir, extra_args):
  """Builds the full eslint command line.

  Args:
    rulesdir: Directory of additional eslint rules to load.
    extra_args: Optional string of extra eslint arguments, or None/empty.

  Returns:
    The eslint command as a list of argument strings.
  """
  eslint_cmd = BASE_ESLINT_CMD + [
      '--rulesdir', rulesdir, '--ext', '.js,.html'
  ]
  if extra_args:
    # split() with no separator collapses runs of whitespace, so multiple
    # spaces between arguments cannot produce empty-string argv entries
    # (extra_args.strip().split(' ') did exactly that).
    eslint_cmd.extend(extra_args.split())
  return eslint_cmd
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def RunEslint(paths, rules_dir=DEFAULT_ESLINT_RULES_DIR, extra_args=None):
  """Runs eslint on a list of paths.

  Args:
    paths: A non-empty list of paths to run eslint on.
    rules_dir: A directory of custom eslint rules.
    extra_args: A string to append to the end of the eslint command.

  Returns:
    A (success, output) tuple: success is True iff eslint exited with
    status 0; output is eslint's combined stdout/stderr, right-stripped.

  Raises:
    ValueError: If paths is not a non-empty list.
  """
  # isinstance() (rather than an exact type() comparison) also accepts
  # list subclasses; `not paths` covers the empty-list case.
  if not isinstance(paths, list) or not paths:
    raise ValueError('Must specify a non-empty list of paths to lint.')

  # Building the command cannot raise CalledProcessError, so it does not
  # need to live inside the try block.
  eslint_cmd = _CreateEslintCommand(rules_dir, extra_args)
  try:
    output = subprocess.check_output(eslint_cmd + paths,
                                     stderr=subprocess.STDOUT)
    return True, output.rstrip()
  except subprocess.CalledProcessError as e:
    # eslint exits non-zero when it finds lint errors; report its output.
    return False, e.output.rstrip()
 | 
			
		||||
@ -0,0 +1,36 @@
 | 
			
		||||
# Copyright 2016 The Chromium Authors. All rights reserved.
 | 
			
		||||
# Use of this source code is governed by a BSD-style license that can be
 | 
			
		||||
# found in the LICENSE file.
 | 
			
		||||
 | 
			
		||||
import eslint
 | 
			
		||||
import os
 | 
			
		||||
import tempfile
 | 
			
		||||
import unittest
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Minimal HTML fixture containing one deliberate naming violation
# ('non_camel_case'), which eslint is expected to report.
_TEMP_FILE_CONTENTS = """<!DOCTYPE html>
<!--
Copyright 2016 The Chromium Authors. All rights reserved.
Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file.
-->
<script>
// This should cause a linter error because we require camelCase.
var non_camel_case = 0;
</script>
"""
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class SmokeTest(unittest.TestCase):
  """End-to-end check that eslint.RunEslint flags a known violation."""

  def testEslintFindsError(self):
    # Create the fixture before entering try/finally: if creation itself
    # fails there is nothing to clean up, whereas the previous version
    # raised a NameError on the unbound tmp_file inside finally.
    tmp_file = tempfile.NamedTemporaryFile(
        delete=False, dir=os.path.dirname(__file__), suffix=".html")
    try:
      tmp_file.write(_TEMP_FILE_CONTENTS)
      tmp_file.close()

      success, output = eslint.RunEslint([tmp_file.name])
      self.assertFalse(success)
      self.assertTrue('is not in camel case' in output)
    finally:
      tmp_file.close()  # No-op if already closed above.
      os.remove(tmp_file.name)
 | 
			
		||||
@ -0,0 +1,154 @@
 | 
			
		||||
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
/* eslint-disable */

/**
 * @fileoverview Rule to flag non-camelcased identifiers
 * @author Nicholas C. Zakas
 */

'use strict';

//------------------------------------------------------------------------------
// Rule Definition
//------------------------------------------------------------------------------

// NOTE(review): per the package README, this appears to be a modified copy of
// eslint's built-in "camelcase" rule, extended with the Catapult-specific
// prefix/suffix exceptions documented in the Identifier handler below —
// confirm against upstream before diverging further.
module.exports = {
    meta: {
        docs: {
            description: "enforce Catapult camelcase naming convention",
            category: "Stylistic Issues",
            recommended: false
        },

        // Single options object: {properties: "always" | "never"}.
        schema: [
            {
                type: "object",
                properties: {
                    properties: {
                        enum: ["always", "never"]
                    }
                },
                additionalProperties: false
            }
        ]
    },

    create(context) {

        //--------------------------------------------------------------------------
        // Helpers
        //--------------------------------------------------------------------------

        // contains reported nodes to avoid reporting twice on destructuring with shorthand notation
        var reported = [];

        /**
         * Checks if a string contains an underscore and isn't all upper-case
         * @param {string} name The string to check.
         * @returns {boolean} if the string is underscored
         * @private
         */
        function isUnderscored(name) {

            // if there's an underscore, it might be A_VARANT, which is okay
            return name.indexOf("_") > -1 && name !== name.toUpperCase();
        }

        /**
         * Reports an AST node as a rule violation.
         * @param {ASTNode} node The node to report.
         * @returns {void}
         * @private
         */
        function report(node) {
            if (reported.indexOf(node) < 0) {
                reported.push(node);
                context.report(node, "Identifier '{{name}}' is not in camel case.", { name: node.name });
            }
        }

        // Any value other than an explicit "never" (including unset) falls
        // back to "always", i.e. object properties are checked by default.
        var options = context.options[0] || {};
        let properties = options.properties || "";

        if (properties !== "always" && properties !== "never") {
            properties = "always";
        }

        return {

            Identifier(node) {

                /*
                 * Leading and trailing underscores are commonly used to flag
                 * private/protected identifiers, strip them.
                 *
                 * NOTE: This has four Catapult-specific style exceptions:
                 *
                 *   - The prefix opt_
                 *   - The prefix g_
                 *   - The suffix _smallerIsBetter
                 *   - The suffix _biggerIsBetter
                 */
                var name = node.name.replace(/(?:^opt_)|^(?:^g_)|^_+|_+$|(?:_smallerIsBetter)$|(?:_biggerIsBetter)$/g, ""),
                    effectiveParent = (node.parent.type === "MemberExpression") ? node.parent.parent : node.parent;

                // MemberExpressions get special rules
                if (node.parent.type === "MemberExpression") {

                    // "never" check properties
                    if (properties === "never") {
                        return;
                    }

                    // Always report underscored object names
                    if (node.parent.object.type === "Identifier" &&
                            node.parent.object.name === node.name &&
                            isUnderscored(name)) {
                        report(node);

                    // Report AssignmentExpressions only if they are the left side of the assignment
                    } else if (effectiveParent.type === "AssignmentExpression" &&
                            isUnderscored(name) &&
                            (effectiveParent.right.type !== "MemberExpression" ||
                            effectiveParent.left.type === "MemberExpression" &&
                            effectiveParent.left.property.name === node.name)) {
                        report(node);
                    }

                // Properties have their own rules
                } else if (node.parent.type === "Property") {

                    // "never" check properties
                    if (properties === "never") {
                        return;
                    }

                    // Destructuring keys ({key: alias}) are not renameable by
                    // the author here, so only the value side is checked.
                    if (node.parent.parent && node.parent.parent.type === "ObjectPattern" &&
                            node.parent.key === node && node.parent.value !== node) {
                        return;
                    }

                    if (isUnderscored(name) && effectiveParent.type !== "CallExpression") {
                        report(node);
                    }

                // Check if it's an import specifier
                } else if (["ImportSpecifier", "ImportNamespaceSpecifier", "ImportDefaultSpecifier"].indexOf(node.parent.type) >= 0) {

                    // Report only if the local imported identifier is underscored
                    if (node.parent.local && node.parent.local.name === node.name && isUnderscored(name)) {
                        report(node);
                    }

                // Report anything that is underscored that isn't a CallExpression
                } else if (isUnderscored(name) && effectiveParent.type !== "CallExpression") {
                    report(node);
                }
            }

        };

    }
};
 | 
			
		||||
@ -0,0 +1,324 @@
 | 
			
		||||
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
/* eslint-disable */

/**
 * @fileoverview Tests for camelcase rule.
 * @author Nicholas C. Zakas
 */

'use strict';

//------------------------------------------------------------------------------
// Requirements
//------------------------------------------------------------------------------

var rule = require("../rules/catapult-camelcase"),
    RuleTester = require("../../node_runner/node_runner/node_modules/eslint/lib/testers/rule-tester");

//------------------------------------------------------------------------------
// Tests
//------------------------------------------------------------------------------

var ruleTester = new RuleTester();

// Each "valid" entry must produce no errors from the rule; each "invalid"
// entry must produce exactly the listed errors.
ruleTester.run("camelcase", rule, {
    valid: [
        "firstName = \"Nicholas\"",
        "FIRST_NAME = \"Nicholas\"",
        "__myPrivateVariable = \"Patrick\"",
        "myPrivateVariable_ = \"Patrick\"",
        "function doSomething(){}",
        "do_something()",
        "foo.do_something()",
        "var foo = bar.baz_boom;",
        "var foo = bar.baz_boom.something;",
        "foo.boom_pow.qux = bar.baz_boom.something;",
        "if (bar.baz_boom) {}",
        "var obj = { key: foo.bar_baz };",
        "var arr = [foo.bar_baz];",
        "[foo.bar_baz]",
        "var arr = [foo.bar_baz.qux];",
        "[foo.bar_baz.nesting]",
        "if (foo.bar_baz === boom.bam_pow) { [foo.baz_boom] }",
        // These tests are for Catapult-specific exceptions.
        "opt_firstName = \"Nicholas\"",
        "g_firstName = \"Nicholas\"",
        "sizeInBytes_smallerIsBetter = \"Nicholas\"",
        "sizeInBytes_biggerIsBetter = \"Nicholas\"",
        {
            code: "var o = {key: 1}",
            options: [{properties: "always"}]
        },
        {
            code: "var o = {bar_baz: 1}",
            options: [{properties: "never"}]
        },
        {
            code: "obj.a_b = 2;",
            options: [{properties: "never"}]
        },
        {
            code: "var obj = {\n a_a: 1 \n};\n obj.a_b = 2;",
            options: [{properties: "never"}]
        },
        {
            code: "obj.foo_bar = function(){};",
            options: [{properties: "never"}]
        },
        {
            code: "var { category_id: category } = query;",
            parserOptions: { ecmaVersion: 6 }
        },
        {
            code: "var { category_id: category } = query;",
            parserOptions: { ecmaVersion: 6 },
            options: [{properties: "never"}]
        },
        {
            code: "import { camelCased } from \"external module\";",
            parserOptions: { ecmaVersion: 6, sourceType: "module" }
        },
        {
            code: "import { no_camelcased as camelCased } from \"external-module\";",
            parserOptions: { ecmaVersion: 6, sourceType: "module" }
        },
        {
            code: "import { no_camelcased as camelCased, anoterCamelCased } from \"external-module\";",
            parserOptions: { ecmaVersion: 6, sourceType: "module" }
        }
    ],
    invalid: [
        {
            code: "first_name = \"Nicholas\"",
            errors: [
                {
                    message: "Identifier 'first_name' is not in camel case.",
                    type: "Identifier"
                }
            ]
        },
        {
            code: "__private_first_name = \"Patrick\"",
            errors: [
                {
                    message: "Identifier '__private_first_name' is not in camel case.",
                    type: "Identifier"
                }
            ]
        },
        {
            code: "function foo_bar(){}",
            errors: [
                {
                    message: "Identifier 'foo_bar' is not in camel case.",
                    type: "Identifier"
                }
            ]
        },
        {
            code: "obj.foo_bar = function(){};",
            errors: [
                {
                    message: "Identifier 'foo_bar' is not in camel case.",
                    type: "Identifier"
                }
            ]
        },
        {
            code: "bar_baz.foo = function(){};",
            errors: [
                {
                    message: "Identifier 'bar_baz' is not in camel case.",
                    type: "Identifier"
                }
            ]
        },
        {
            code: "[foo_bar.baz]",
            errors: [
                {
                    message: "Identifier 'foo_bar' is not in camel case.",
                    type: "Identifier"
                }
            ]
        },
        {
            code: "if (foo.bar_baz === boom.bam_pow) { [foo_bar.baz] }",
            errors: [
                {
                    message: "Identifier 'foo_bar' is not in camel case.",
                    type: "Identifier"
                }
            ]
        },
        {
            code: "foo.bar_baz = boom.bam_pow",
            errors: [
                {
                    message: "Identifier 'bar_baz' is not in camel case.",
                    type: "Identifier"
                }
            ]
        },
        {
            code: "var foo = { bar_baz: boom.bam_pow }",
            errors: [
                {
                    message: "Identifier 'bar_baz' is not in camel case.",
                    type: "Identifier"
                }
            ]
        },
        {
            code: "foo.qux.boom_pow = { bar: boom.bam_pow }",
            errors: [
                {
                    message: "Identifier 'boom_pow' is not in camel case.",
                    type: "Identifier"
                }
            ]
        },
        {
            code: "var o = {bar_baz: 1}",
            options: [{properties: "always"}],
            errors: [
                {
                    message: "Identifier 'bar_baz' is not in camel case.",
                    type: "Identifier"
                }
            ]
        },
        {
            code: "obj.a_b = 2;",
            options: [{properties: "always"}],
            errors: [
                {
                    message: "Identifier 'a_b' is not in camel case.",
                    type: "Identifier"
                }
            ]
        },
        {
            code: "obj.a_b = 2;",
            options: [{properties: "always"}],
            errors: [
                {
                    message: "Identifier 'a_b' is not in camel case.",
                    type: "Identifier"
                }
            ]
        },
        {
            code: "var { category_id: category_id } = query;",
            parserOptions: { ecmaVersion: 6 },
            errors: [
                {
                    message: "Identifier 'category_id' is not in camel case.",
                    type: "Identifier"
                }
            ]
        },
        {
            code: "var { category_id } = query;",
            parserOptions: { ecmaVersion: 6 },
            errors: [
                {
                    message: "Identifier 'category_id' is not in camel case.",
                    type: "Identifier"
                }
            ]
        },
        {
            code: "import no_camelcased from \"external-module\";",
            parserOptions: { ecmaVersion: 6, sourceType: "module" },
            errors: [
                {
                    message: "Identifier 'no_camelcased' is not in camel case.",
                    type: "Identifier"
                }
            ]
        },
        {
            code: "import * as no_camelcased from \"external-module\";",
            parserOptions: { ecmaVersion: 6, sourceType: "module" },
            errors: [
                {
                    message: "Identifier 'no_camelcased' is not in camel case.",
                    type: "Identifier"
                }
            ]
        },
        {
            code: "import { no_camelcased } from \"external-module\";",
            parserOptions: { ecmaVersion: 6, sourceType: "module" },
            errors: [
                {
                    message: "Identifier 'no_camelcased' is not in camel case.",
                    type: "Identifier"
                }
            ]
        },
        {
            code: "import { no_camelcased as no_camel_cased } from \"external module\";",
            parserOptions: { ecmaVersion: 6, sourceType: "module" },
            errors: [
                {
                    message: "Identifier 'no_camel_cased' is not in camel case.",
                    type: "Identifier"
                }
            ]
        },
        {
            code: "import { camelCased as no_camel_cased } from \"external module\";",
            parserOptions: { ecmaVersion: 6, sourceType: "module" },
            errors: [
                {
                    message: "Identifier 'no_camel_cased' is not in camel case.",
                    type: "Identifier"
                }
            ]
        },
        {
            code: "import { camelCased, no_camelcased } from \"external-module\";",
            parserOptions: { ecmaVersion: 6, sourceType: "module" },
            errors: [
                {
                    message: "Identifier 'no_camelcased' is not in camel case.",
                    type: "Identifier"
                }
            ]
        },
        {
            code: "import { no_camelcased as camelCased, another_no_camelcased } from \"external-module\";",
            parserOptions: { ecmaVersion: 6, sourceType: "module" },
            errors: [
                {
                    message: "Identifier 'another_no_camelcased' is not in camel case.",
                    type: "Identifier"
                }
            ]
        },
        {
            code: "import camelCased, { no_camelcased } from \"external-module\";",
            parserOptions: { ecmaVersion: 6, sourceType: "module" },
            errors: [
                {
                    message: "Identifier 'no_camelcased' is not in camel case.",
                    type: "Identifier"
                }
            ]
        },
        {
            code: "import no_camelcased, { another_no_camelcased as camelCased } from \"external-module\";",
            parserOptions: { ecmaVersion: 6, sourceType: "module" },
            errors: [
                {
                    message: "Identifier 'no_camelcased' is not in camel case.",
                    type: "Identifier"
                }
            ]
        }
    ]
});
 | 
			
		||||
							
								
								
									
										105
									
								
								tools/adb/systrace/catapult/common/lab/commits.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										105
									
								
								tools/adb/systrace/catapult/common/lab/commits.py
									
									
									
									
									
										Normal file
									
								
							@ -0,0 +1,105 @@
 | 
			
		||||
#!/usr/bin/env python
 | 
			
		||||
# Copyright 2015 The Chromium Authors. All rights reserved.
 | 
			
		||||
# Use of this source code is governed by a BSD-style license that can be
 | 
			
		||||
# found in the LICENSE file.
 | 
			
		||||
 | 
			
		||||
"""Print statistics about the rate of commits to a repository."""
 | 
			
		||||
 | 
			
		||||
import datetime
 | 
			
		||||
import itertools
 | 
			
		||||
import json
 | 
			
		||||
import math
 | 
			
		||||
import urllib
 | 
			
		||||
import urllib2
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
_BASE_URL = 'https://chromium.googlesource.com'
 | 
			
		||||
# Can be up to 10,000.
 | 
			
		||||
_REVISION_COUNT = 10000
 | 
			
		||||
 | 
			
		||||
_REPOSITORIES = [
 | 
			
		||||
    'chromium/src',
 | 
			
		||||
    'angle/angle',
 | 
			
		||||
    'skia',
 | 
			
		||||
    'v8/v8',
 | 
			
		||||
]
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def Pairwise(iterable):
  """Yield overlapping adjacent pairs: s -> (s0,s1), (s1,s2), (s2, s3), ...

  Args:
    iterable: Any iterable; it is consumed lazily.

  Returns:
    A lazy iterator of (element, next_element) tuples. Empty or
    single-element input yields nothing.
  """
  # Two independent iterators over the same sequence; advancing b by one
  # makes it the "next element" stream.
  a, b = itertools.tee(iterable)
  next(b, None)  # Drop b's first element; the default avoids StopIteration.
  # NOTE: itertools.izip is Python 2 only (zip in Python 3).
  return itertools.izip(a, b)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def Percentile(data, percentile):
  """Find a percentile of a list of values, with linear interpolation.

  Parameters:
    data: A sorted (ascending) list of numeric values.
    percentile: The percentile to look up, from 0.0 to 1.0.

  Returns:
    The value at the requested percentile; when the percentile falls
    between two elements, the result is linearly interpolated.

  Raises:
    ValueError: If data is empty.
  """
  if not data:
    raise ValueError('data must be a non-empty sorted list')

  # Fractional index of the requested percentile within data.
  k = (len(data) - 1) * percentile
  f = math.floor(k)
  c = math.ceil(k)

  if f == c:
    # k is an exact integer index; no interpolation needed.
    return data[int(k)]
  # Linearly interpolate between the two neighboring values.
  return data[int(f)] * (c - k) + data[int(c)] * (k - f)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def CommitTimes(repository, revision_count):
  """Fetch the committer timestamps of recent commits from gitiles.

  Args:
    repository: Repository path under _BASE_URL, e.g. 'chromium/src'.
    revision_count: Number of revisions to request (the 'n' parameter).

  Returns:
    A list of naive datetime objects, one per commit, each shifted back by
    a fixed 7 hours (UTC -> PDT; NOTE(review): ignores DST — confirm).
  """
  parameters = urllib.urlencode((('n', revision_count), ('format', 'JSON')))
  url = '%s/%s/+log?%s' % (_BASE_URL, urllib.quote(repository), parameters)
  # The first line of the response is dropped before parsing — presumably
  # the gitiles anti-XSSI guard prefix; verify against the API.
  data = json.loads(''.join(urllib2.urlopen(url).read().splitlines()[1:]))

  commit_times = []
  for revision in data['log']:
    commit_time_string = revision['committer']['time']
    commit_time = datetime.datetime.strptime(
        commit_time_string, '%a %b %d %H:%M:%S %Y')
    # Shift by a hardcoded 7-hour offset (see docstring note).
    commit_times.append(commit_time - datetime.timedelta(hours=7))

  return commit_times
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def IsWeekday(time):
  """Return True if `time` falls on Monday through Friday."""
  # datetime.weekday(): Monday == 0 ... Sunday == 6.
  return 0 <= time.weekday() < 5
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def main():
  """Print commit-rate statistics (Python 2 script) for each repository.

  For every repository in _REPOSITORIES: fetch recent commit times, compute
  the minutes between consecutive commits, and print date range, count,
  selected percentiles, and the mean inter-commit duration.
  """
  for repository in _REPOSITORIES:
    commit_times = CommitTimes(repository, _REVISION_COUNT)

    # Minutes between consecutive commits (times are newest-first, so
    # time1 - time2 is non-negative).
    commit_durations = []
    for time1, time2 in Pairwise(commit_times):
      #if not (IsWeekday(time1) and IsWeekday(time2)):
      #  continue
      commit_durations.append((time1 - time2).total_seconds() / 60.)
    # Sorted ascending, as required by Percentile().
    commit_durations.sort()

    print 'REPOSITORY:', repository
    print 'Start Date:', min(commit_times), 'PDT'
    print '  End Date:', max(commit_times), 'PDT'
    print '  Duration:', max(commit_times) - min(commit_times)
    print '         n:', len(commit_times)

    for p in (0.25, 0.50, 0.90):
      percentile = Percentile(commit_durations, p)
      print '%3d%% commit duration:' % (p * 100), '%6.1fm' % percentile
    # math.fsum for an accurate float sum over many durations.
    mean = math.fsum(commit_durations) / len(commit_durations)
    print 'Mean commit duration:', '%6.1fm' % mean
    print
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Script entry point.
if __name__ == '__main__':
  main()
 | 
			
		||||
							
								
								
									
										93
									
								
								tools/adb/systrace/catapult/common/lab/hardware.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										93
									
								
								tools/adb/systrace/catapult/common/lab/hardware.py
									
									
									
									
									
										Normal file
									
								
							@ -0,0 +1,93 @@
 | 
			
		||||
#!/usr/bin/env python
 | 
			
		||||
# Copyright 2015 The Chromium Authors. All rights reserved.
 | 
			
		||||
# Use of this source code is governed by a BSD-style license that can be
 | 
			
		||||
# found in the LICENSE file.
 | 
			
		||||
 | 
			
		||||
"""Query build slave hardware info, and print it to stdout as csv."""
 | 
			
		||||
 | 
			
		||||
import csv
 | 
			
		||||
import json
 | 
			
		||||
import logging
 | 
			
		||||
import sys
 | 
			
		||||
import urllib2
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
_MASTERS = [
 | 
			
		||||
    'chromium.perf',
 | 
			
		||||
    'chromium.perf.fyi',
 | 
			
		||||
    'client.catapult',
 | 
			
		||||
    'tryserver.chromium.perf',
 | 
			
		||||
    'tryserver.client.catapult',
 | 
			
		||||
]
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
_KEYS = [
 | 
			
		||||
    'master', 'builder', 'hostname',
 | 
			
		||||
 | 
			
		||||
    'os family', 'os version', 'bitness (userland)',
 | 
			
		||||
 | 
			
		||||
    'product name', 'architecture', 'processor count', 'processor type',
 | 
			
		||||
    'memory total',
 | 
			
		||||
 | 
			
		||||
    'facter version', 'git version', 'puppet version', 'python version',
 | 
			
		||||
    'ruby version',
 | 
			
		||||
 | 
			
		||||
    'android device 1', 'android device 2', 'android device 3',
 | 
			
		||||
    'android device 4', 'android device 5', 'android device 6',
 | 
			
		||||
    'android device 7', 'android device 8',
 | 
			
		||||
]
 | 
			
		||||
_EXCLUDED_KEYS = frozenset([
 | 
			
		||||
    'architecture (userland)',
 | 
			
		||||
    'b directory',
 | 
			
		||||
    'last puppet run',
 | 
			
		||||
    'uptime',
 | 
			
		||||
    'windows version',
 | 
			
		||||
])
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def main():
  """Query buildbot slave hardware info and write it to stdout as CSV.

  For each master in _MASTERS, fetches the /json/slaves endpoint, flattens
  each (slave, builder) pair into one CSV row keyed by _KEYS, parsing the
  'key: value' lines of the slave's host info. Python 2 script (iteritems).
  """
  writer = csv.DictWriter(sys.stdout, _KEYS)
  writer.writeheader()

  for master_name in _MASTERS:
    master_data = json.load(urllib2.urlopen(
        'http://build.chromium.org/p/%s/json/slaves' % master_name))

    # Stable ordering: group slaves by their builder list, then by name.
    slaves = sorted(master_data.iteritems(),
                    key=lambda x: (x[1]['builders'].keys(), x[0]))
    for slave_name, slave_data in slaves:
      for builder_name in slave_data['builders']:
        row = {
            'master': master_name,
            'builder': builder_name,
            'hostname': slave_name,
        }

        # 'host' is a multi-line "key: value" blob; a single-line value is
        # skipped (presumably not real host info — confirm).
        host_data = slave_data['host']
        if host_data:
          host_data = host_data.splitlines()
          if len(host_data) > 1:
            for line in host_data:
              if not line:
                continue
              key, value = line.split(': ')
              if key in _EXCLUDED_KEYS:
                continue
              row[key] = value

        # Munge keys.
        row = {key.replace('_', ' '): value for key, value in row.iteritems()}
        if 'osfamily' in row:
          row['os family'] = row.pop('osfamily')
        if 'product name' not in row and slave_name.startswith('slave'):
          row['product name'] = 'Google Compute Engine'

        try:
          writer.writerow(row)
        except ValueError:
          # DictWriter raises ValueError on keys missing from _KEYS; log the
          # offending row before re-raising so it can be diagnosed.
          logging.error(row)
          raise
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Script entry point.
if __name__ == '__main__':
  main()
 | 
			
		||||
							
								
								
									
										15
									
								
								tools/adb/systrace/catapult/common/lab/keychain_unlock.sh
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										15
									
								
								tools/adb/systrace/catapult/common/lab/keychain_unlock.sh
									
									
									
									
									
										Normal file
									
								
							@ -0,0 +1,15 @@
 | 
			
		||||
#!/bin/sh
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# Script to SSH into a list of bots and set up their keychains for Telemetry.
# https://www.chromium.org/developers/telemetry/telemetry-mac-keychain-setup
#
# Usage: keychain_unlock.sh HOSTNAME [HOSTNAME ...]
#
# For each host: unlocks the login keychain, deletes any existing
# "Chrome Safe Storage" entry, and re-adds it with a fixed password.
# NOTE(review): the password below is committed in plain text — presumably a
# shared, non-secret test-bot key; confirm before reusing elsewhere.

for hostname in "$@"
do
  ssh -t "$hostname" 'security unlock-keychain login.keychain
security delete-generic-password -s "Chrome Safe Storage" login.keychain
security add-generic-password -a Chrome -w "+NTclOvR4wLMgRlLIL9bHQ==" \
  -s "Chrome Safe Storage" -A login.keychain'
done
 | 
			
		||||
@ -0,0 +1,11 @@
 | 
			
		||||
Update binaries:
 | 
			
		||||
 | 
			
		||||
1. Download the archives of pre-compiled binaries.
 | 
			
		||||
2. Unzip archives.
 | 
			
		||||
3. Re-zip just the binary:
 | 
			
		||||
   `zip new.zip node-v10.14.1-linux-x64/bin/node`
 | 
			
		||||
4. Use the update script:
 | 
			
		||||
   `./dependency_manager/bin/update --config
 | 
			
		||||
   common/node_runner/node_runner/node_binaries.json --dependency node --path
 | 
			
		||||
   new.zip --platform linux_x86_64`
 | 
			
		||||
5. Mail out the automated change to `node_binaries.json` for review and CQ.
 | 
			
		||||
@ -0,0 +1,4 @@
 | 
			
		||||
# Copyright 2016 The Chromium Authors. All rights reserved.
 | 
			
		||||
# Use of this source code is governed by a BSD-style license that can be
 | 
			
		||||
# found in the LICENSE file.
 | 
			
		||||
 | 
			
		||||
@ -0,0 +1,53 @@
 | 
			
		||||
#!/usr/bin/env node
'use strict';
/*
Copyright 2018 The Chromium Authors. All rights reserved.
Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file.

This script wraps common HTML transformations including stripping whitespace and
comments from HTML, CSS, and Javascript.
*/
const dom5 = require('dom5');
const escodegen = require('escodegen');
const espree = require('espree');
const fs = require('fs');
const nopt = require('nopt');

// Single positional argument: the HTML file to rewrite in place.
const args = nopt();
const filename = args.argv.remain[0];

let html = fs.readFileSync(filename).toString('utf8');
let parsedHtml = dom5.parse(html);
// First, collapse text nodes around comments (by removing comment nodes,
// re-serializing, and re-parsing) in order to prevent multiple extraneous
// newlines.
for (const node of dom5.nodeWalkAll(parsedHtml, () => true)) {
  if (dom5.isCommentNode(node)) {
    dom5.remove(node);
  }
}
html = dom5.serialize(parsedHtml);
parsedHtml = dom5.parse(html);
// Some of these transformations are based on polyclean:
// https://github.com/googlearchive/polyclean
for (const node of dom5.nodeWalkAll(parsedHtml, () => true)) {
  if (dom5.isTextNode(node)) {
    // Collapse indentation and runs of blank lines in text nodes.
    dom5.setTextContent(node, dom5.getTextContent(node)
      .replace(/ *\n+ */g, '\n')
      .replace(/\n+/g, '\n'));
  } else if (dom5.predicates.hasTagName('script')(node) &&
             !dom5.predicates.hasAttr('src')(node)) {
    // Re-print inline (non-src) scripts with no indentation by round-tripping
    // through the espree parser and escodegen printer.
    let text = dom5.getTextContent(node);
    const ast = espree.parse(text, {ecmaVersion: 2018});
    text = escodegen.generate(ast, {format: {indent: {style: ''}}});
    dom5.setTextContent(node, text);
  } else if (dom5.predicates.hasTagName('style')(node)) {
    // Minify inline CSS: drop newlines, collapse runs of spaces, and remove
    // spaces adjacent to CSS punctuation.
    dom5.setTextContent(node, dom5.getTextContent(node)
      .replace(/[\r\n]/g, '')
      .replace(/ {2,}/g, ' ')
      .replace(/(^|[;,\:\{\}]) /g, '$1')
      .replace(/ ($|[;,\{\}])/g, '$1'));
  }
}
fs.writeFileSync(filename, dom5.serialize(parsedHtml));
 | 
			
		||||
@ -0,0 +1,21 @@
 | 
			
		||||
#!/usr/bin/env node
'use strict';
/*
Copyright 2019 The Chromium Authors. All rights reserved.
Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file.

This script strips whitespace and comments from Javascript.
*/
const escodegen = require('escodegen');
const espree = require('espree');
const fs = require('fs');
const nopt = require('nopt');

// Single positional argument: the Javascript file to rewrite in place.
const args = nopt();
const filename = args.argv.remain[0];

// Round-trip through the espree parser and escodegen printer; comments and
// indentation are not preserved, which performs the stripping.
let text = fs.readFileSync(filename).toString('utf8');
const ast = espree.parse(text, {ecmaVersion: 2018});
text = escodegen.generate(ast, {format: {indent: {style: ''}}});
fs.writeFileSync(filename, text);
 | 
			
		||||
@ -0,0 +1,53 @@
 | 
			
		||||
{
 | 
			
		||||
  "config_type": "BaseConfig",
 | 
			
		||||
  "dependencies": {
 | 
			
		||||
    "node": {
 | 
			
		||||
      "cloud_storage_base_folder": "binary_dependencies",
 | 
			
		||||
      "cloud_storage_bucket": "chromium-telemetry",
 | 
			
		||||
      "file_info": {
 | 
			
		||||
        "linux_x86_64": {
 | 
			
		||||
          "cloud_storage_hash": "27ad092b0ce59d2da32090a00f717f0c31e65240",
 | 
			
		||||
          "download_path": "bin/node/node-linux64.zip",
 | 
			
		||||
          "path_within_archive": "node-v10.14.1-linux-x64/bin/node",
 | 
			
		||||
          "version_in_cs": "6.7.0"
 | 
			
		||||
        },
 | 
			
		||||
        "mac_x86_64": {
 | 
			
		||||
          "cloud_storage_hash": "1af7c221e530165af8a6ab8ff7ccb1f2dd54036d",
 | 
			
		||||
          "download_path": "bin/node/node-mac64.zip",
 | 
			
		||||
          "path_within_archive": "node-v6.7.0-darwin-x64/bin/node",
 | 
			
		||||
          "version_in_cs": "6.7.0"
 | 
			
		||||
        },
 | 
			
		||||
        "win_AMD64": {
 | 
			
		||||
          "cloud_storage_hash": "23f21bfb2edf874a8b6bdb6c1acb408bc7edeced",
 | 
			
		||||
          "download_path": "bin/node/node-win64.zip",
 | 
			
		||||
          "path_within_archive": "node-v6.7.0-win-x64/node.exe",
 | 
			
		||||
          "version_in_cs": "6.7.0"
 | 
			
		||||
        }
 | 
			
		||||
      }
 | 
			
		||||
    },
 | 
			
		||||
    "npm": {
 | 
			
		||||
      "cloud_storage_base_folder": "binary_dependencies",
 | 
			
		||||
      "cloud_storage_bucket": "chromium-telemetry",
 | 
			
		||||
      "file_info": {
 | 
			
		||||
        "linux_x86_64": {
 | 
			
		||||
          "cloud_storage_hash": "5750e968975e7f5ab8cb694f5e92a34a890e129d",
 | 
			
		||||
          "download_path": "bin/node/node-linux64.zip",
 | 
			
		||||
          "path_within_archive": "node-v6.7.0-linux-x64/lib/node_modules/npm/bin/npm-cli.js",
 | 
			
		||||
          "version_in_cs": "6.7.0"
 | 
			
		||||
        },
 | 
			
		||||
        "mac_x86_64": {
 | 
			
		||||
          "cloud_storage_hash": "1af7c221e530165af8a6ab8ff7ccb1f2dd54036d",
 | 
			
		||||
          "download_path": "bin/node/node-mac64.zip",
 | 
			
		||||
          "path_within_archive": "node-v6.7.0-darwin-x64/lib/node_modules/npm/bin/npm-cli.js",
 | 
			
		||||
          "version_in_cs": "6.7.0"
 | 
			
		||||
        },
 | 
			
		||||
        "win_AMD64": {
 | 
			
		||||
          "cloud_storage_hash": "23f21bfb2edf874a8b6bdb6c1acb408bc7edeced",
 | 
			
		||||
          "download_path": "bin/node/node-win64.zip",
 | 
			
		||||
          "path_within_archive": "node-v6.7.0-win-x64\\node_modules\\npm\\bin\\npm-cli.js",
 | 
			
		||||
          "version_in_cs": "6.7.0"
 | 
			
		||||
        }
 | 
			
		||||
      }
 | 
			
		||||
    }
 | 
			
		||||
  }
 | 
			
		||||
}
 | 
			
		||||
@ -0,0 +1,60 @@
 | 
			
		||||
# Copyright 2016 The Chromium Authors. All rights reserved.
 | 
			
		||||
# Use of this source code is governed by a BSD-style license that can be
 | 
			
		||||
# found in the LICENSE file.
 | 
			
		||||
 | 
			
		||||
import os
 | 
			
		||||
import subprocess
 | 
			
		||||
import sys
 | 
			
		||||
 | 
			
		||||
import py_utils
 | 
			
		||||
from py_utils import binary_manager
 | 
			
		||||
from py_utils import dependency_util
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _NodeBinariesConfigPath():
 | 
			
		||||
  return os.path.realpath(os.path.join(
 | 
			
		||||
      os.path.dirname(os.path.abspath(__file__)), 'node_binaries.json'))
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class _NodeManager(object):
  """Fetches the node and npm binaries and runs `npm install` on demand."""

  def __init__(self):
    # Binary manager configured from node_binaries.json; fetches the
    # platform-appropriate node and npm binaries at construction time.
    self.bm = binary_manager.BinaryManager(
        [_NodeBinariesConfigPath()])
    self.os_name = dependency_util.GetOSNameForCurrentDesktopPlatform()
    self.arch_name = dependency_util.GetArchForCurrentDesktopPlatform(
        self.os_name)
    # Local filesystem paths of the fetched binaries.
    self.node_path = self.bm.FetchPath('node', self.os_name, self.arch_name)
    self.npm_path = self.bm.FetchPath('npm', self.os_name, self.arch_name)

    # Guard so InitNode's `npm install` runs at most once per process.
    self.node_initialized = False

  def InitNode(self):
    """Run `npm install` in the node_runner directory (idempotent)."""
    if self.node_initialized:
      return  # So we only init once per run
    self.node_initialized = True
    # npm must run from the directory containing package.json, so chdir
    # there and restore the previous working directory afterwards.
    old_dir = os.path.abspath(os.curdir)
    os.chdir(os.path.join(os.path.abspath(
        py_utils.GetCatapultDir()), 'common', 'node_runner', 'node_runner'))
    subprocess.call([self.node_path, self.npm_path, 'install'])
    os.chdir(old_dir)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Module-level singleton; constructing it fetches the node and npm binaries
# at import time.
_NODE_MANAGER = _NodeManager()
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def InitNode():
  """Run `npm install` for node_runner (at most once per process)."""
  _NODE_MANAGER.InitNode()
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def GetNodePath():
  """Return the local filesystem path of the fetched node binary."""
  return _NODE_MANAGER.node_path
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def GetNodeModulesPath():
  """Return the node_modules directory path, running `npm install` first."""
  # Ensure npm install has run so node_modules exists.
  _NODE_MANAGER.InitNode()
  path = os.path.abspath(os.path.join(os.path.dirname(__file__),
                                      'node_modules'))
  if sys.platform.startswith('win'):
    # Escape path on Windows because it's very long and must be passed to NTFS.
    path = u'\\\\?\\' + path
  return path
 | 
			
		||||
							
								
								
									
										7189
									
								
								tools/adb/systrace/catapult/common/node_runner/node_runner/package-lock.json
									
									
									
										generated
									
									
									
										Normal file
									
								
							
							
						
						
									
										7189
									
								
								tools/adb/systrace/catapult/common/node_runner/node_runner/package-lock.json
									
									
									
										generated
									
									
									
										Normal file
									
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							@ -0,0 +1,64 @@
 | 
			
		||||
{
 | 
			
		||||
  "name": "catapult_base",
 | 
			
		||||
  "version": "1.0.0",
 | 
			
		||||
  "description": "Catapult project base",
 | 
			
		||||
  "repository": {
 | 
			
		||||
    "type": "git",
 | 
			
		||||
    "url": "https://github.com/catapult-project/catapult/tree/master/catapult_base"
 | 
			
		||||
  },
 | 
			
		||||
  "main": "index.js",
 | 
			
		||||
  "scripts": {
 | 
			
		||||
    "test": "cd ../../../dashboard/dashboard/spa && karma start --coverage --no-colors"
 | 
			
		||||
  },
 | 
			
		||||
  "author": "The Chromium Authors",
 | 
			
		||||
  "license": "BSD-2-Clause",
 | 
			
		||||
  "gypfile": false,
 | 
			
		||||
  "private": true,
 | 
			
		||||
  "dependencies": {
 | 
			
		||||
    "dot-prop-immutable": "1.5.0",
 | 
			
		||||
    "@chopsui/result-channel": "0.1.0",
 | 
			
		||||
    "@chopsui/batch-iterator": "0.1.0",
 | 
			
		||||
    "@chopsui/chops-button": "0.1.11",
 | 
			
		||||
    "@chopsui/chops-checkbox": "0.1.11",
 | 
			
		||||
    "@chopsui/chops-input": "0.1.11",
 | 
			
		||||
    "@chopsui/chops-loading": "0.1.11",
 | 
			
		||||
    "@chopsui/chops-radio": "0.1.11",
 | 
			
		||||
    "@chopsui/chops-radio-group": "0.1.11",
 | 
			
		||||
    "@chopsui/chops-switch": "0.1.11",
 | 
			
		||||
    "@chopsui/chops-tab": "0.1.11",
 | 
			
		||||
    "@chopsui/chops-tab-bar": "0.1.11",
 | 
			
		||||
    "@chopsui/chops-textarea": "0.1.11",
 | 
			
		||||
    "@chopsui/tsmon-client": "0.0.1",
 | 
			
		||||
    "@chopsui/chops-header": "0.1.5",
 | 
			
		||||
    "@chopsui/chops-signin": "0.1.5",
 | 
			
		||||
    "@polymer/app-route": "^3.0.0",
 | 
			
		||||
    "@polymer/iron-collapse": "^3.0.0",
 | 
			
		||||
    "@polymer/iron-icon": "^3.0.0",
 | 
			
		||||
    "@polymer/iron-iconset-svg": "^3.0.0",
 | 
			
		||||
    "@polymer/polymer": "^3.0.0",
 | 
			
		||||
    "chai": "^4.0.2",
 | 
			
		||||
    "dom5": "^1.0.0",
 | 
			
		||||
    "escodegen": "^1.11.0",
 | 
			
		||||
    "eslint": "^4.0.0",
 | 
			
		||||
    "eslint-config-google": "^0.6.0",
 | 
			
		||||
    "eslint-plugin-html": "^4.0.0",
 | 
			
		||||
    "espree": "^3.0.0",
 | 
			
		||||
    "istanbul-instrumenter-loader": "^3.0.1",
 | 
			
		||||
    "lit-element": "^2.0.0",
 | 
			
		||||
    "karma": "^4.0.0",
 | 
			
		||||
    "karma-chrome-launcher": "^2.2.0",
 | 
			
		||||
    "karma-coverage": "^1.1.2",
 | 
			
		||||
    "karma-mocha": "^1.3.0",
 | 
			
		||||
    "karma-sinon": "^1.0.5",
 | 
			
		||||
    "karma-sourcemap-loader": "^0.3.7",
 | 
			
		||||
    "karma-webpack": "4.0.0-rc.6",
 | 
			
		||||
    "mocha": "^5.2.0",
 | 
			
		||||
    "path": "^0.12.7",
 | 
			
		||||
    "puppeteer": "^1.10.0",
 | 
			
		||||
    "redux": "^4.0.0",
 | 
			
		||||
    "sinon": "^7.2.3",
 | 
			
		||||
    "vulcanize": "^1.16.0",
 | 
			
		||||
    "webpack": "^4.16.1",
 | 
			
		||||
    "webpack-command": "^0.4.1"
 | 
			
		||||
  }
 | 
			
		||||
}
 | 
			
		||||
@ -0,0 +1,7 @@
 | 
			
		||||
py_trace_event allows low-overhead instrumentation of a multi-threaded,
 | 
			
		||||
multi-process application in order to study its global performance
 | 
			
		||||
characteristics. It uses the trace event format used in Chromium/Chrome's
 | 
			
		||||
about:tracing system.
 | 
			
		||||
 | 
			
		||||
Trace files generated by py_trace_event can be viewed and manipulated by
 | 
			
		||||
trace_event_viewer.
 | 
			
		||||
@ -0,0 +1,35 @@
 | 
			
		||||
#!/usr/bin/env python
 | 
			
		||||
# Copyright (c) 2015 The Chromium Authors. All rights reserved.
 | 
			
		||||
# Use of this source code is governed by a BSD-style license that can be
 | 
			
		||||
# found in the LICENSE file.
 | 
			
		||||
 | 
			
		||||
import os
 | 
			
		||||
import sys
 | 
			
		||||
 | 
			
		||||
# Absolute path of the catapult checkout root (three levels up from here).
_CATAPULT_PATH = os.path.abspath(
    os.path.join(os.path.dirname(__file__), '..', '..', '..'))

# Absolute path of the py_trace_event package (parent of this directory).
_PY_TRACE_EVENT_PATH = os.path.abspath(
    os.path.join(os.path.dirname(__file__), '..'))
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _RunTestsOrDie(top_level_dir):
  """Run all tests under top_level_dir with typ; exit the process on failure.

  NOTE(review): run_with_typ is not imported at module scope — it is bound
  as a global by the `from catapult_build import run_with_typ` in the
  __main__ block below, so this function only works when run as a script.
  """
  # Need everything in one process for tracing to work.
  exit_code = run_with_typ.Run(
      top_level_dir, path=[_PY_TRACE_EVENT_PATH], jobs=1)
  if exit_code:
    sys.exit(exit_code)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _AddToPathIfNeeded(path):
 | 
			
		||||
  if path not in sys.path:
 | 
			
		||||
    sys.path.insert(0, path)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
if __name__ == '__main__':
  # Make the catapult checkout root importable before using catapult_build.
  _AddToPathIfNeeded(_CATAPULT_PATH)

  # Binds run_with_typ as a module-level global (used by _RunTestsOrDie).
  from catapult_build import run_with_typ

  _RunTestsOrDie(_PY_TRACE_EVENT_PATH)
 | 
			
		||||
 | 
			
		||||
@ -0,0 +1,12 @@
 | 
			
		||||
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import sys

# Make the sibling py_utils package and the bundled protobuf importable.
SCRIPT_DIR = os.path.abspath(os.path.dirname(__file__))
PY_UTILS = os.path.abspath(os.path.join(SCRIPT_DIR, '..', '..', 'py_utils'))
PROTOBUF = os.path.abspath(os.path.join(
    SCRIPT_DIR, '..', 'third_party', 'protobuf'))
for _extra_path in (PY_UTILS, PROTOBUF):
  sys.path.append(_extra_path)
 | 
			
		||||
@ -0,0 +1,12 @@
 | 
			
		||||
#!/usr/bin/env python
# Copyright 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Packaging script for py_trace_event; ships only the trace_event_impl
# package. NOTE(review): distutils is deprecated (removed in Python 3.12);
# consider migrating to setuptools.
from distutils.core import setup
setup(
    name='py_trace_event',
    packages=['trace_event_impl'],
    version='0.1.0',
    description='Performance tracing for python',
    author='Nat Duca'
)
 | 
			
		||||
@ -0,0 +1,295 @@
 | 
			
		||||
# Copyright 2016 The Chromium Authors. All rights reserved.
 | 
			
		||||
# Use of this source code is governed by a BSD-style license that can be
 | 
			
		||||
# found in the LICENSE file.
 | 
			
		||||
from py_trace_event import trace_time
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
r"""Instrumentation-based profiling for Python.
 | 
			
		||||
 | 
			
		||||
trace_event allows you to hand-instrument your code with areas of interest.
 | 
			
		||||
When enabled, trace_event logs the start and stop times of these events to a
 | 
			
		||||
logfile. These resulting logfiles can be viewed with either Chrome's
 | 
			
		||||
about:tracing UI or with the standalone trace_event_viewer available at
 | 
			
		||||
  http://www.github.com/natduca/trace_event_viewer/
 | 
			
		||||
 | 
			
		||||
To use trace event, call trace_event_enable and start instrumenting your code:
 | 
			
		||||
   from trace_event import *
 | 
			
		||||
 | 
			
		||||
   if "--trace" in sys.argv:
 | 
			
		||||
     trace_enable("myfile.trace")
 | 
			
		||||
 | 
			
		||||
   @traced
 | 
			
		||||
   def foo():
 | 
			
		||||
     ...
 | 
			
		||||
 | 
			
		||||
   class MyFoo(object):
 | 
			
		||||
     @traced
 | 
			
		||||
     def bar(self):
 | 
			
		||||
       ...
 | 
			
		||||
 | 
			
		||||
trace_event records trace events to an in-memory buffer. If your application is
 | 
			
		||||
long running and you want to see the results of a trace before it exits, you can
 | 
			
		||||
call trace_flush to write any in-memory events to disk.
 | 
			
		||||
 | 
			
		||||
To help integrate trace_event into existing codebases that don't want to add
 | 
			
		||||
trace_event as a dependency, trace_event is split into an import shim
 | 
			
		||||
(trace_event.py) and an implementation (trace_event_impl/*). You can copy the
 | 
			
		||||
shim, trace_event.py, directly into your including codebase. If the
 | 
			
		||||
trace_event_impl is not found, the shim will simply noop.
 | 
			
		||||
 | 
			
		||||
trace_event is safe with regard to Python threads. Simply trace as you normally
 | 
			
		||||
would and each thread's timing will show up in the trace file.
 | 
			
		||||
 | 
			
		||||
Multiple processes can safely output into a single trace_event logfile. If you
 | 
			
		||||
fork after enabling tracing, the child process will continue outputting to the
 | 
			
		||||
logfile. Use of the multiprocessing module will work as well. In both cases,
 | 
			
		||||
however, note that disabling tracing in the parent process will not stop tracing
 | 
			
		||||
in the child processes.
 | 
			
		||||
"""
 | 
			
		||||
 | 
			
		||||
# Import the real implementation if present; otherwise fall back to no-op
# tracing (trace_event_impl stays None and the stub definitions are used).
try:
  import trace_event_impl
except ImportError:
  trace_event_impl = None
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def trace_can_enable():
  """
  Returns True if a trace_event_impl was found. If false,
  trace_enable will fail. Regular tracing methods, including
  trace_begin and trace_end, will simply be no-ops.
  """
  # Identity comparison with None is the correct idiom (was "!= None").
  return trace_event_impl is not None
 | 
			
		||||
 | 
			
		||||
# Default TracedMetaClass to type in case trace_event_impl is not defined.
# This is to avoid an exception at import time, since TracedMetaClass is
# typically used in class definition scope.
 | 
			
		||||
TracedMetaClass = type
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Dispatch: bind the public API to the real implementation when available,
# otherwise install no-op stubs so instrumented code runs unchanged.
if trace_event_impl:
  import time

  # Trace file formats (re-exported from the implementation).
  JSON = trace_event_impl.JSON
  JSON_WITH_METADATA = trace_event_impl.JSON_WITH_METADATA
  PROTOBUF = trace_event_impl.PROTOBUF

  def trace_is_enabled():
    return trace_event_impl.trace_is_enabled()

  def trace_enable(logfile=None, format=None):
    # logfile defaults to None (the impl derives a name from sys.argv[0]),
    # matching the documented contract and the no-impl stub below.
    return trace_event_impl.trace_enable(logfile, format)

  def trace_disable():
    return trace_event_impl.trace_disable()

  def trace_flush():
    trace_event_impl.trace_flush()

  def trace_begin(name, **kwargs):
    # repr() the arguments now; their values may be mutated later.
    # NOTE(review): relies on trace_time being imported earlier in this
    # module -- confirm the import exists above this block.
    args_to_log = {key: repr(value) for key, value in kwargs.iteritems()}
    trace_event_impl.add_trace_event("B", trace_time.Now(), "python", name,
                                     args_to_log)

  def trace_end(name):
    trace_event_impl.add_trace_event("E", trace_time.Now(), "python", name)

  def trace_set_thread_name(thread_name):
    trace_event_impl.add_trace_event("M", trace_time.Now(), "__metadata",
                                     "thread_name", {"name": thread_name})

  def trace_add_benchmark_metadata(*args, **kwargs):
    trace_event_impl.trace_add_benchmark_metadata(*args, **kwargs)

  def trace(name, **kwargs):
    return trace_event_impl.trace(name, **kwargs)

  TracedMetaClass = trace_event_impl.TracedMetaClass

  def traced(fn):
    return trace_event_impl.traced(fn)

  def clock_sync(sync_id, issue_ts=None):
    '''
    Add a clock sync event to the trace log.

    Args:
      sync_id: ID of clock sync event.
      issue_ts: Time at which clock sync was issued, in microseconds.
    '''
    time_stamp = trace_time.Now()
    args_to_log = {'sync_id': sync_id}
    if issue_ts: # Issuer if issue_ts is set, else receiver.
      assert issue_ts <= time_stamp
      args_to_log['issue_ts'] = issue_ts
    trace_event_impl.add_trace_event(
        "c", time_stamp, "python", "clock_sync", args_to_log)

  def is_tracing_controllable():
    return trace_event_impl.is_tracing_controllable()

else:
  import contextlib

  # Trace file formats (meaningless without an implementation).
  JSON = None
  JSON_WITH_METADATA = None
  PROTOBUF = None

  def trace_enable(logfile=None, format=None):
    # Accept the same signature as the real implementation so callers get a
    # TraceException (not a TypeError) when no trace_event_impl is present.
    del logfile  # unused.
    del format  # unused.
    raise TraceException(
        "Cannot enable trace_event. No trace_event_impl module found.")

  def trace_disable():
    pass

  def trace_is_enabled():
    return False

  def trace_flush():
    pass

  def trace_begin(name, **kwargs):
    del name # unused.
    del kwargs # unused.

  def trace_end(name):
    del name # unused.

  def trace_set_thread_name(thread_name):
    del thread_name # unused.

  @contextlib.contextmanager
  def trace(name, **kwargs):
    del name # unused
    del kwargs # unused
    yield

  def traced(fn):
    return fn

  def clock_sync(sync_id, issue_ts=None):
    del sync_id # unused.
    del issue_ts # unused.

  def is_tracing_controllable():
    return False
 | 
			
		||||
 | 
			
		||||
# Shared docstring for whichever trace_enable implementation was bound above.
# (Fixed typos: "sys[argv]" -> "sys.argv", "is is used", "subprocesses".)
trace_enable.__doc__ = """Enables tracing.

  Once enabled, the enabled bit propagates to forked processes and
  multiprocessing subprocesses. Regular child processes, e.g. those created via
  os.system/popen, or subprocess.Popen instances, will not get traced. You can,
  however, enable tracing on those subprocesses manually.

  Trace files are multiprocess safe, so you can have multiple processes
  outputting to the same tracelog at once.

  log_file can be one of three things:

    None: a logfile is opened based on sys.argv, namely
          "./" + sys.argv[0] + ".json"

    string: a logfile of the given name is opened.

    file-like object: the fileno() is used. The underlying file descriptor
                      must support fcntl.lockf() operations.
  """
 | 
			
		||||
 | 
			
		||||
trace_disable.__doc__ = """Disables tracing, if enabled.

  Will not disable tracing on any existing child processes that were forked
  from this process. You must disable them yourself.
  """
 | 
			
		||||
 | 
			
		||||
trace_flush.__doc__ = """Flushes any currently-recorded trace data to disk.
 | 
			
		||||
 | 
			
		||||
  trace_event records traces into an in-memory buffer for efficiency. Flushing
 | 
			
		||||
  is only done at process exit or when this method is called.
 | 
			
		||||
  """
 | 
			
		||||
 | 
			
		||||
trace_is_enabled.__doc__ = """Returns whether tracing is enabled.
 | 
			
		||||
  """
 | 
			
		||||
 | 
			
		||||
trace_begin.__doc__ = """Records the beginning of an event of the given name.
 | 
			
		||||
 | 
			
		||||
  The building block for performance tracing. A typical example is:
 | 
			
		||||
     from trace_event import *
 | 
			
		||||
     def something_heavy():
 | 
			
		||||
        trace_begin("something_heavy")
 | 
			
		||||
 | 
			
		||||
        trace_begin("read")
 | 
			
		||||
        try:
 | 
			
		||||
          lines = open().readlines()
 | 
			
		||||
        finally:
 | 
			
		||||
          trace_end("read")
 | 
			
		||||
 | 
			
		||||
        trace_begin("parse")
 | 
			
		||||
        try:
 | 
			
		||||
          parse(lines)
 | 
			
		||||
        finally:
 | 
			
		||||
          trace_end("parse")
 | 
			
		||||
 | 
			
		||||
        trace_end("something_heavy")
 | 
			
		||||
 | 
			
		||||
  Note that a trace_end call must be issued for every trace_begin call. When
 | 
			
		||||
  tracing around blocks that might throw exceptions, you should use the trace
 | 
			
		||||
  function, or a try-finally pattern to ensure that the trace_end method is
 | 
			
		||||
  called.
 | 
			
		||||
 | 
			
		||||
  See the documentation for the @traced decorator for a simpler way to
 | 
			
		||||
  instrument functions and methods.
 | 
			
		||||
  """
 | 
			
		||||
 | 
			
		||||
trace_end.__doc__ = """Records the end of an event of the given name.
 | 
			
		||||
 | 
			
		||||
  See the documentation for trace_begin for more information.
 | 
			
		||||
 | 
			
		||||
  Make sure to issue a trace_end for every trace_begin issued. Failure to pair
 | 
			
		||||
  these calls will lead to bizarrely tall looking traces in the
 | 
			
		||||
  trace_event_viewer UI.
 | 
			
		||||
  """
 | 
			
		||||
 | 
			
		||||
trace_set_thread_name.__doc__ = """Sets the trace's name for the current thread.
 | 
			
		||||
  """
 | 
			
		||||
 | 
			
		||||
trace.__doc__ = """Traces a block of code using a with statement.
 | 
			
		||||
 | 
			
		||||
  Example usage:
 | 
			
		||||
    from trace_event import *
 | 
			
		||||
    def something_heavy(lines):
 | 
			
		||||
      with trace("parse_lines", lines=lines):
 | 
			
		||||
        parse(lines)
 | 
			
		||||
 | 
			
		||||
  If tracing an entire function call, prefer the @traced decorator.
 | 
			
		||||
  """
 | 
			
		||||
 | 
			
		||||
traced.__doc__ = """
 | 
			
		||||
  Traces the provided function, using the function name for the actual generated
 | 
			
		||||
  event.
 | 
			
		||||
 | 
			
		||||
  Prefer this decorator over the explicit trace_begin and trace_end functions
 | 
			
		||||
  whenever you are tracing the start and stop of a function. It automatically
 | 
			
		||||
  issues trace_begin/end events, even when the wrapped function throws.
 | 
			
		||||
 | 
			
		||||
  You can also pass the function's argument names to traced, and the argument
 | 
			
		||||
  values will be added to the trace. Example usage:
 | 
			
		||||
    from trace_event import *
 | 
			
		||||
    @traced("url")
 | 
			
		||||
    def send_request(url):
 | 
			
		||||
      urllib2.urlopen(url).read()
 | 
			
		||||
  """
 | 
			
		||||
 | 
			
		||||
clock_sync.__doc__ = """
 | 
			
		||||
  Issues a clock sync marker event.
 | 
			
		||||
 | 
			
		||||
  Clock sync markers are used to synchronize the clock domains of different
 | 
			
		||||
  traces so that they can be used together. It takes a sync_id, and if it is
 | 
			
		||||
  the issuer of a clock sync event it will also require an issue_ts. The
 | 
			
		||||
  issue_ts is a timestamp from when the clocksync was first issued. This is used
 | 
			
		||||
  to calculate the time difference between clock domains.
 | 
			
		||||
  """
 | 
			
		||||
@ -0,0 +1,7 @@
 | 
			
		||||
# Copyright 2016 The Chromium Authors. All rights reserved.
 | 
			
		||||
# Use of this source code is governed by a BSD-style license that can be
 | 
			
		||||
# found in the LICENSE file.
 | 
			
		||||
from log import *
 | 
			
		||||
from decorators import *
 | 
			
		||||
from meta_class import *
 | 
			
		||||
import multiprocessing_shim
 | 
			
		||||
@ -0,0 +1,87 @@
 | 
			
		||||
# Copyright 2016 The Chromium Authors. All rights reserved.
 | 
			
		||||
# Use of this source code is governed by a BSD-style license that can be
 | 
			
		||||
# found in the LICENSE file.
 | 
			
		||||
import contextlib
 | 
			
		||||
import inspect
 | 
			
		||||
import time
 | 
			
		||||
import functools
 | 
			
		||||
 | 
			
		||||
import log
 | 
			
		||||
from py_trace_event import trace_time
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@contextlib.contextmanager
def trace(name, **kwargs):
  """Context manager emitting a "B" event on entry and "E" event on exit.

  Keyword arguments are repr()'d and attached to the begin event.
  """
  cat = "python"
  begin_ts = trace_time.Now()
  logged_args = dict((key, repr(val)) for key, val in kwargs.iteritems())
  log.add_trace_event("B", begin_ts, cat, name, logged_args)
  try:
    yield
  finally:
    log.add_trace_event("E", trace_time.Now(), cat, name)
 | 
			
		||||
 | 
			
		||||
def traced(*args):
  """Decorator that emits begin/end trace events around the wrapped function.

  Usable either bare (@traced) or with argument names (@traced("url")), in
  which case the named arguments' values are repr()'d into the begin event.
  """
  def get_wrapper(func):
    if inspect.isgeneratorfunction(func):
      raise Exception("Can not trace generators.")

    category = "python"

    arg_spec = inspect.getargspec(func)
    # Heuristic: a leading "self" parameter marks a method.
    is_method = arg_spec.args and arg_spec.args[0] == "self"

    def arg_spec_tuple(name):
      # Returns (name, positional index, default or None) for a logged
      # argument; defaults align with the tail of arg_spec.args.
      arg_index = arg_spec.args.index(name)
      defaults_length = len(arg_spec.defaults) if arg_spec.defaults else 0
      default_index = arg_index + defaults_length - len(arg_spec.args)
      if default_index >= 0:
        default = arg_spec.defaults[default_index]
      else:
        default = None
      return (name, arg_index, default)

    args_to_log = map(arg_spec_tuple, arg_names)

    @functools.wraps(func)
    def traced_function(*args, **kwargs):
      # Everything outside traced_function is done at decoration-time.
      # Everything inside traced_function is done at run-time and must be fast.
      if not log._enabled:  # This check must be at run-time.
        return func(*args, **kwargs)

      def get_arg_value(name, index, default):
        # Resolve a logged argument: keyword, then positional, then default.
        if name in kwargs:
          return kwargs[name]
        elif index < len(args):
          return args[index]
        else:
          return default

      if is_method:
        name = "%s.%s" % (args[0].__class__.__name__, func.__name__)
      else:
        name = "%s.%s" % (func.__module__, func.__name__)

      # Be sure to repr before calling func. Argument values may change.
      arg_values = {
          name: repr(get_arg_value(name, index, default))
          for name, index, default in args_to_log}

      start = trace_time.Now()
      log.add_trace_event("B", start, category, name, arg_values)
      try:
        return func(*args, **kwargs)
      finally:
        end = trace_time.Now()
        log.add_trace_event("E", end, category, name)
    return traced_function

  # Bare use (@traced) passes the function itself; parameterized use
  # (@traced("a", "b")) passes argument names and returns the decorator.
  no_decorator_arguments = len(args) == 1 and callable(args[0])
  if no_decorator_arguments:
    arg_names = ()
    return get_wrapper(args[0])
  else:
    arg_names = args
    return get_wrapper
 | 
			
		||||
@ -0,0 +1,63 @@
 | 
			
		||||
#!/usr/bin/env python
 | 
			
		||||
# Copyright 2016 The Chromium Authors. All rights reserved.
 | 
			
		||||
# Use of this source code is governed by a BSD-style license that can be
 | 
			
		||||
# found in the LICENSE file.
 | 
			
		||||
import decorators
 | 
			
		||||
import logging
 | 
			
		||||
import unittest
 | 
			
		||||
 | 
			
		||||
from trace_test import TraceTest
 | 
			
		||||
#from .trace_test import TraceTest
 | 
			
		||||
 | 
			
		||||
def generator():
  """Tiny generator fixture used to verify that tracing rejects generators."""
  for value in (1, 2):
    yield value
 | 
			
		||||
 | 
			
		||||
class DecoratorTests(unittest.TestCase):
  # NOTE(review): a second class also named DecoratorTests is defined later in
  # this module and shadows this one at module scope, so these two tests are
  # never discovered or run by unittest. Consider renaming one of the classes.
  def test_tracing_object_fails(self):
    # Non-callables passed to decorators.trace are expected to raise.
    self.assertRaises(Exception, lambda: decorators.trace(1))
    self.assertRaises(Exception, lambda: decorators.trace(""))
    self.assertRaises(Exception, lambda: decorators.trace([]))

  def test_tracing_generators_fail(self):
    # Generator functions are explicitly rejected by the tracing decorator.
    self.assertRaises(Exception, lambda: decorators.trace(generator))
 | 
			
		||||
 | 
			
		||||
class ClassToTest(object):
  # Fixture: two traced methods so tests can check the generated event names.
  @decorators.traced
  def method1(self):
    return 1

  @decorators.traced
  def method2(self):
    return 1
 | 
			
		||||
 | 
			
		||||
@decorators.traced
def traced_func():
  # Fixture: module-level traced function (event name is "<module>.traced_func").
  return 1
 | 
			
		||||
 | 
			
		||||
class DecoratorTests(TraceTest):
  # NOTE(review): this class shadows the unittest.TestCase-based DecoratorTests
  # defined earlier in this module; only this one is discovered by unittest.
  def _get_decorated_method_name(self, f):
    """Runs f under tracing and returns the event name @traced produced."""
    res = self.go(f)
    events = res.findEventsOnThread(res.findThreadIds()[0])

    # Sanity checks.
    self.assertEquals(2, len(events))
    self.assertEquals(events[0]["name"], events[1]["name"])
    return events[1]["name"]


  def test_func_names_work(self):
    # Free functions are named "<module>.<func>".
    expected_method_name = __name__ + '.traced_func'
    self.assertEquals(expected_method_name,
                      self._get_decorated_method_name(traced_func))

  def test_method_names_work(self):
    # Methods are named "<class>.<method>".
    ctt = ClassToTest()
    self.assertEquals('ClassToTest.method1',
                      self._get_decorated_method_name(ctt.method1))
    self.assertEquals('ClassToTest.method2',
                      self._get_decorated_method_name(ctt.method2))
 | 
			
		||||
 | 
			
		||||
if __name__ == '__main__':
 | 
			
		||||
  logging.getLogger().setLevel(logging.DEBUG)
 | 
			
		||||
  unittest.main(verbosity=2)
 | 
			
		||||
@ -0,0 +1,364 @@
 | 
			
		||||
# Copyright 2016 The Chromium Authors. All rights reserved.
 | 
			
		||||
# Use of this source code is governed by a BSD-style license that can be
 | 
			
		||||
# found in the LICENSE file.
 | 
			
		||||
import atexit
 | 
			
		||||
import json
 | 
			
		||||
import os
 | 
			
		||||
import sys
 | 
			
		||||
import time
 | 
			
		||||
import threading
 | 
			
		||||
import multiprocessing
 | 
			
		||||
import multiprocessing_shim
 | 
			
		||||
 | 
			
		||||
from py_trace_event.trace_event_impl import perfetto_trace_writer
 | 
			
		||||
from py_trace_event import trace_time
 | 
			
		||||
 | 
			
		||||
from py_utils import lock
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Trace file formats:
 | 
			
		||||
 | 
			
		||||
# Legacy format: json list of events.
 | 
			
		||||
# Events can be written from multiple processes, but since no process
 | 
			
		||||
# can be sure that it is the last one, nobody writes the closing ']'.
 | 
			
		||||
# So the resulting file is not technically correct json.
 | 
			
		||||
JSON = "json"
 | 
			
		||||
 | 
			
		||||
# Full json with events and metadata.
 | 
			
		||||
# This format produces correct json ready to feed into TraceDataBuilder.
 | 
			
		||||
# Note that it is the responsibility of the user of py_trace_event to make sure
 | 
			
		||||
# that trace_disable() is called after all child processes have finished.
 | 
			
		||||
JSON_WITH_METADATA = "json_with_metadata"
 | 
			
		||||
 | 
			
		||||
# Perfetto protobuf trace format.
 | 
			
		||||
PROTOBUF = "protobuf"
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
_lock = threading.Lock()
 | 
			
		||||
 | 
			
		||||
_enabled = False
 | 
			
		||||
_log_file = None
 | 
			
		||||
 | 
			
		||||
_cur_events = [] # events that have yet to be buffered
 | 
			
		||||
_benchmark_metadata = {}
 | 
			
		||||
 | 
			
		||||
_tls = threading.local() # tls used to detect forking/etc
 | 
			
		||||
_atexit_regsitered_for_pid = None
 | 
			
		||||
 | 
			
		||||
_control_allowed = True
 | 
			
		||||
 | 
			
		||||
_original_multiprocessing_process = multiprocessing.Process
 | 
			
		||||
 | 
			
		||||
class TraceException(Exception):
  """Raised for invalid tracing operations (e.g. enabling twice, or
  controlling tracing from a child process)."""
 | 
			
		||||
 | 
			
		||||
def _note(msg, *args):
 | 
			
		||||
  pass
 | 
			
		||||
#  print "%i: %s" % (os.getpid(), msg)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _locked(fn):
 | 
			
		||||
  def locked_fn(*args,**kwargs):
 | 
			
		||||
    _lock.acquire()
 | 
			
		||||
    try:
 | 
			
		||||
      ret = fn(*args,**kwargs)
 | 
			
		||||
    finally:
 | 
			
		||||
      _lock.release()
 | 
			
		||||
    return ret
 | 
			
		||||
  return locked_fn
 | 
			
		||||
 | 
			
		||||
def _disallow_tracing_control():
  """Marks this process as a child: trace_enable/trace_disable become errors."""
  global _control_allowed
  _control_allowed = False
 | 
			
		||||
 | 
			
		||||
def trace_enable(log_file=None, format=None):
  """ Enable tracing.

  Args:
    log_file: file to write trace into. Can be a file-like object,
      a name of file, or None. If None, file name is constructed
      from executable name.
    format: trace file format. See trace_event.py for available options.
  """
  # JSON is the default format when none is requested.
  _trace_enable(log_file, JSON if format is None else format)
 | 
			
		||||
 | 
			
		||||
def _write_header():
  """Writes the one-time trace header (thread descriptor / process_argv).

  Called only by the process that creates the log file; processes appending
  to an existing log skip it.
  """
  # Fall back to the pid when the thread has no ident.
  tid = threading.current_thread().ident
  if not tid:
    tid = os.getpid()

  if _format == PROTOBUF:
    # Fix: the original re-read threading.current_thread().ident here,
    # clobbering the pid fallback computed above.
    perfetto_trace_writer.write_thread_descriptor_event(
        output=_log_file,
        pid=os.getpid(),
        tid=tid,
        ts=trace_time.Now(),
    )
    perfetto_trace_writer.write_event(
        output=_log_file,
        ph="M",
        category="process_argv",
        name="process_argv",
        ts=trace_time.Now(),
        args=sys.argv,
        tid=tid,
    )
  else:
    if _format == JSON:
      _log_file.write('[')
    elif _format == JSON_WITH_METADATA:
      _log_file.write('{"traceEvents": [\n')
    else:
      raise TraceException("Unknown format: %s" % _format)
    json.dump({
        "ph": "M",
        "category": "process_argv",
        "pid": os.getpid(),
        "tid": threading.current_thread().ident,
        "ts": trace_time.Now(),
        "name": "process_argv",
        "args": {"argv": sys.argv},
    }, _log_file)
    _log_file.write('\n')
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@_locked
def _trace_enable(log_file=None, format=None):
  """Implementation of trace_enable; runs under _lock (via @_locked).

  Opens or creates the log file, writes the header if this process created
  it, and monkey-patches multiprocessing.Process so children inherit tracing.
  """
  global _format
  _format = format
  global _enabled
  if _enabled:
    raise TraceException("Already enabled")
  if not _control_allowed:
    raise TraceException("Tracing control not allowed in child processes.")
  _enabled = True
  global _log_file
  if log_file == None:
    # Derive a log file name from the executable name.
    if sys.argv[0] == '':
      n = 'trace_event'
    else:
      n = sys.argv[0]
    if _format == PROTOBUF:
      log_file = open("%s.pb" % n, "ab", False)
    else:
      log_file = open("%s.json" % n, "ab", False)
  elif isinstance(log_file, basestring):  # Python 2: accepts str and unicode.
    log_file = open("%s" % log_file, "ab", False)
  elif not hasattr(log_file, 'fileno'):
    raise TraceException(
        "Log file must be None, a string, or file-like object with a fileno()")

  _note("trace_event: tracelog name is %s" % log_file)

  _log_file = log_file
  # Multiple processes may share the log; hold an exclusive file lock while
  # deciding whether we are the creator (and so must write the header).
  with lock.FileLock(_log_file, lock.LOCK_EX):
    _log_file.seek(0, os.SEEK_END)

    lastpos = _log_file.tell()
    creator = lastpos == 0
    if creator:
      _note("trace_event: Opened new tracelog, lastpos=%i", lastpos)
      _write_header()
    else:
      _note("trace_event: Opened existing tracelog")
    _log_file.flush()
  # Monkeypatch in our process replacement for the multiprocessing.Process class
  if multiprocessing.Process != multiprocessing_shim.ProcessShim:
      multiprocessing.Process = multiprocessing_shim.ProcessShim
 | 
			
		||||
 | 
			
		||||
@_locked
def trace_flush():
  """Writes any buffered trace events to the log file, if tracing is on."""
  if _enabled:
    _flush()
 | 
			
		||||
 | 
			
		||||
@_locked
def trace_disable():
  """Stops tracing, flushes buffered events, and closes the log file.

  Raises TraceException when called from a child process (tracing control is
  restricted to the process that enabled it). A no-op if tracing is off.
  """
  global _enabled
  if not _control_allowed:
    raise TraceException("Tracing control not allowed in child processes.")
  if _enabled:
    _enabled = False
    _flush(close=True)
    # Undo the multiprocessing.Process monkey-patch installed at enable time.
    multiprocessing.Process = _original_multiprocessing_process
 | 
			
		||||
 | 
			
		||||
def _write_cur_events():
  """Serializes buffered events to the log file and clears the buffer.

  Caller must hold _lock and the exclusive file lock.
  """
  if _format == PROTOBUF:
    for e in _cur_events:
      perfetto_trace_writer.write_event(
          output=_log_file,
          ph=e["ph"],
          category=e["category"],
          name=e["name"],
          ts=e["ts"],
          args=e["args"],
          tid=threading.current_thread().ident,
      )
  elif _format in (JSON, JSON_WITH_METADATA):
    for e in _cur_events:
      # Events are comma-prefixed; the header already wrote the opening '['.
      _log_file.write(",\n")
      json.dump(e, _log_file)
  else:
    raise TraceException("Unknown format: %s" % _format)
  del _cur_events[:]
 | 
			
		||||
 | 
			
		||||
def _write_footer():
  """Writes format-specific trailing data; called once from trace_disable."""
  if _format in [JSON, PROTOBUF]:
    # In JSON format we might not be the only process writing to this logfile.
    # So, we will simply close the file rather than writing the trailing ] that
    # it technically requires. The trace viewer understands this and
    # will insert a trailing ] during loading.
    # In PROTOBUF format there's no need for a footer. The metadata has already
    # been written in a special proto message.
    pass
  elif _format == JSON_WITH_METADATA:
    _log_file.write('],\n"metadata": ')
    json.dump(_benchmark_metadata, _log_file)
    _log_file.write('}')
  else:
    raise TraceException("Unknown format: %s" % _format)
 | 
			
		||||
 | 
			
		||||
def _flush(close=False):
  """Writes buffered events under the file lock; optionally closes the log.

  Args:
    close: when True, also writes the footer and closes the log file.
  """
  global _log_file
  with lock.FileLock(_log_file, lock.LOCK_EX):
    _log_file.seek(0, os.SEEK_END)
    if len(_cur_events):
      _write_cur_events()
    if close:
      _write_footer()
    _log_file.flush()

  if close:
    _note("trace_event: Closed")
    _log_file.close()
    _log_file = None
  else:
    _note("trace_event: Flushed")
 | 
			
		||||
 | 
			
		||||
@_locked
def trace_is_enabled():
  """Returns True while tracing is enabled in this process."""
  return _enabled
 | 
			
		||||
 | 
			
		||||
@_locked
def add_trace_event(ph, ts, category, name, args=None):
  """Buffers one trace event; a no-op when tracing is disabled.

  Args:
    ph: event phase, e.g. "B" (begin), "E" (end), "M" (metadata).
    ts: timestamp in microseconds (trace_time.Now()).
    category: event category string.
    name: event name.
    args: optional dict of event arguments.
  """
  # (Fix: dropped the needless `global _enabled` -- it is only read here.)
  if not _enabled:
    return
  # Detect a fork: _tls.pid is stale in a child process.
  if not hasattr(_tls, 'pid') or _tls.pid != os.getpid():
    _tls.pid = os.getpid()
    global _atexit_regsitered_for_pid
    if _tls.pid != _atexit_regsitered_for_pid:
      _atexit_regsitered_for_pid = _tls.pid
      atexit.register(_trace_disable_atexit)
      # (Fix: removed a redundant `_tls.pid = os.getpid()` re-assignment.)
      del _cur_events[:] # we forked, clear the event buffer!
    tid = threading.current_thread().ident
    if not tid:
      tid = os.getpid()
    _tls.tid = tid

  _cur_events.append({
      "ph": ph,
      "category": category,
      "pid": _tls.pid,
      "tid": _tls.tid,
      "ts": ts,
      "name": name,
      "args": args or {},
  })
 | 
			
		||||
 | 
			
		||||
def trace_begin(name, args=None):
  """Buffers a begin ("B") event at the current time."""
  add_trace_event("B", trace_time.Now(), "python", name, args)

def trace_end(name, args=None):
  """Buffers an end ("E") event at the current time."""
  add_trace_event("E", trace_time.Now(), "python", name, args)

def trace_set_thread_name(thread_name):
  """Buffers a metadata event naming the current thread in the trace."""
  add_trace_event("M", trace_time.Now(), "__metadata", "thread_name",
                  {"name": thread_name})
 | 
			
		||||
 | 
			
		||||
def trace_add_benchmark_metadata(
    benchmark_start_time_us,
    story_run_time_us,
    benchmark_name,
    benchmark_description,
    story_name,
    story_tags,
    story_run_index,
    label=None,
    had_failures=None,
):
  """ Add benchmark metadata to be written to trace file.

  Args:
    benchmark_start_time_us: Benchmark start time in microseconds.
    story_run_time_us: Story start time in microseconds.
    benchmark_name: Name of the benchmark.
    benchmark_description: Description of the benchmark.
    story_name: Name of the story.
    story_tags: List of story tags.
    story_run_index: Index of the story run.
    label: Optional label.
    had_failures: Whether this story run failed.

  Raises:
    TraceException: in plain JSON format (no place to put metadata) or when
      the current format is unknown.
  """
  global _benchmark_metadata
  if _format == PROTOBUF:
    # Write metadata immediately.
    perfetto_trace_writer.write_metadata(
        output=_log_file,
        benchmark_start_time_us=benchmark_start_time_us,
        story_run_time_us=story_run_time_us,
        benchmark_name=benchmark_name,
        benchmark_description=benchmark_description,
        story_name=story_name,
        story_tags=story_tags,
        story_run_index=story_run_index,
        label=label,
        had_failures=had_failures,
    )
  elif _format == JSON_WITH_METADATA:
    # Store metadata to write it in the footer.
    telemetry_metadata_for_json = {
        "benchmarkStart": benchmark_start_time_us / 1000.0,
        "traceStart": story_run_time_us / 1000.0,
        "benchmarks": [benchmark_name],
        "benchmarkDescriptions": [benchmark_description],
        "stories": [story_name],
        "storyTags": story_tags,
        "storysetRepeats": [story_run_index],
    }
    if label:
      telemetry_metadata_for_json["labels"] = [label]
    if had_failures:
      telemetry_metadata_for_json["hadFailures"] = [had_failures]

    _benchmark_metadata = {
        # TODO(crbug.com/948633): For right now, we use "TELEMETRY" as the
        # clock domain to guarantee that Telemetry is given its own clock
        # domain. Telemetry isn't really a clock domain, though: it's a
        # system that USES a clock domain like LINUX_CLOCK_MONOTONIC or
        # WIN_QPC. However, there's a chance that a Telemetry controller
        # running on Linux (using LINUX_CLOCK_MONOTONIC) is interacting
        # with an Android phone (also using LINUX_CLOCK_MONOTONIC, but
        # on a different machine). The current logic collapses clock
        # domains based solely on the clock domain string, but we really
        # should to collapse based on some (device ID, clock domain ID)
        # tuple. Giving Telemetry its own clock domain is a work-around
        # for this.
        "clock-domain": "TELEMETRY",
        "telemetry": telemetry_metadata_for_json,
    }
  elif _format == JSON:
    raise TraceException("Can't write metadata in JSON format")
  else:
    raise TraceException("Unknown format: %s" % _format)
 | 
			
		||||
 | 
			
		||||
def _trace_disable_atexit():
  """atexit hook: ensures tracing is stopped (and flushed) at interpreter exit."""
  trace_disable()
 | 
			
		||||
 | 
			
		||||
def is_tracing_controllable():
  """Return True if this process is allowed to enable/disable tracing.

  The flag is cleared via _disallow_tracing_control() in child processes
  spawned through the multiprocessing shim, so only the controlling process
  manages the trace lifecycle.
  """
  # A `global` statement is only needed to *assign* a module-level name;
  # reading it works without one, so the original declaration was dropped.
  return _control_allowed
 | 
			
		||||
@ -0,0 +1,46 @@
 | 
			
		||||
#!/usr/bin/env python
 | 
			
		||||
# Copyright 2016 The Chromium Authors. All rights reserved.
 | 
			
		||||
# Use of this source code is governed by a BSD-style license that can be
 | 
			
		||||
# found in the LICENSE file.
 | 
			
		||||
import logging
 | 
			
		||||
import os
 | 
			
		||||
import sys
 | 
			
		||||
import unittest
 | 
			
		||||
 | 
			
		||||
from log import *
 | 
			
		||||
from parsed_trace_events import *
 | 
			
		||||
from py_utils import tempfile_ext
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class LogIOTest(unittest.TestCase):
  """End-to-end checks that enabling tracing produces a parseable trace file."""

  def test_enable_with_file(self):
    # Passing an open file object to trace_enable.
    with tempfile_ext.TemporaryFileName() as trace_path:
      trace_enable(open(trace_path, 'w+'))
      trace_disable()
      parsed = ParsedTraceEvents(trace_filename=trace_path)
      self.assertGreater(len(parsed), 0)

  def test_enable_with_filename(self):
    # Passing a filename string to trace_enable.
    with tempfile_ext.TemporaryFileName() as trace_path:
      trace_enable(trace_path)
      trace_disable()
      parsed = ParsedTraceEvents(trace_filename=trace_path)
      self.assertGreater(len(parsed), 0)

  def test_enable_with_implicit_filename(self):
    # No argument: the trace goes to "<program name>.json".
    expected_filename = "%s.json" % sys.argv[0]
    try:
      trace_enable()
      trace_disable()
      parsed = ParsedTraceEvents(trace_filename=expected_filename)
      self.assertGreater(len(parsed), 0)
    finally:
      # Clean up the implicitly-created trace file.
      if os.path.exists(expected_filename):
        os.unlink(expected_filename)
 | 
			
		||||
 | 
			
		||||
if __name__ == '__main__':
  # Verbose logging plus verbose test output make trace I/O failures
  # easier to diagnose on the bots.
  logging.getLogger().setLevel(logging.DEBUG)
  unittest.main(verbosity=2)
 | 
			
		||||
 | 
			
		||||
@ -0,0 +1,17 @@
 | 
			
		||||
# Copyright 2016 The Chromium Authors. All rights reserved.
 | 
			
		||||
# Use of this source code is governed by a BSD-style license that can be
 | 
			
		||||
# found in the LICENSE file.
 | 
			
		||||
 | 
			
		||||
import types
 | 
			
		||||
 | 
			
		||||
from py_trace_event.trace_event_impl import decorators
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class TracedMetaClass(type):
  """Metaclass that wraps every public method of a class with decorators.traced.

  Attributes whose names start with '_' and non-function attributes
  (classmethods, staticmethods, properties, plain values) are left untouched.
  """

  def __new__(cls, name, bases, attrs):
    # Iterate over a snapshot: we replace values in `attrs` inside the loop,
    # and use items() instead of the Python-2-only iteritems() so the class
    # also works under Python 3.
    for attr_name, attr_value in list(attrs.items()):
      if (not attr_name.startswith('_') and
          isinstance(attr_value, types.FunctionType)):
        attrs[attr_name] = decorators.traced(attr_value)

    return super(TracedMetaClass, cls).__new__(cls, name, bases, attrs)
 | 
			
		||||
@ -0,0 +1,88 @@
 | 
			
		||||
# Copyright 2016 The Chromium Authors. All rights reserved.
 | 
			
		||||
# Use of this source code is governed by a BSD-style license that can be
 | 
			
		||||
# found in the LICENSE file.
 | 
			
		||||
import multiprocessing
 | 
			
		||||
import log
 | 
			
		||||
import time
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
_RealProcess = multiprocessing.Process
 | 
			
		||||
__all__ = []
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class ProcessSubclass(_RealProcess):
  """multiprocessing.Process subclass that flushes trace data when run() ends.

  Child processes must not control tracing themselves, so tracing control is
  disallowed before the real run() body executes.
  """

  def __init__(self, shim, *args, **kwargs):
    _RealProcess.__init__(self, *args, **kwargs)
    self._shim = shim

  def run(self, *args, **kwargs):
    log._disallow_tracing_control()
    try:
      result = _RealProcess.run(self, *args, **kwargs)
    finally:
      # Flush even when run() raises. TODO: reduce the need for this.
      if log.trace_is_enabled():
        log.trace_flush()
    return result
 | 
			
		||||
 | 
			
		||||
class ProcessShim(object):
  """Drop-in replacement for multiprocessing.Process that traces child runs.

  Forwards the standard Process API to a ProcessSubclass instance so that
  code creating processes through the shimmed multiprocessing module gets
  trace flushing in the child for free.

  Note: this must be a new-style class (inherit from object). The original
  `class ProcessShim():` was an old-style class under Python 2, on which the
  property setters below are silently ignored.
  """

  def __init__(self, group=None, target=None, name=None, args=(), kwargs={}):
    # The mutable {} default mirrors multiprocessing.Process's own signature;
    # it is only forwarded, never mutated.
    self._proc = ProcessSubclass(self, group, target, name, args, kwargs)
    # Hint to testing code that the shimming worked.
    self._shimmed_by_trace_event = True

  def run(self):
    self._proc.run()

  def start(self):
    self._proc.start()

  def terminate(self):
    if log.trace_is_enabled():
      # Give the flush a chance to finish. TODO: find some other way.
      time.sleep(0.25)
    self._proc.terminate()

  def join(self, timeout=None):
    self._proc.join(timeout)

  def is_alive(self):
    return self._proc.is_alive()

  @property
  def name(self):
    return self._proc.name

  @name.setter
  def name(self, name):
    self._proc.name = name

  @property
  def daemon(self):
    return self._proc.daemon

  @daemon.setter
  def daemon(self, daemonic):
    self._proc.daemon = daemonic

  @property
  def authkey(self):
    # Use the public property rather than reaching into the private
    # _authkey attribute of the wrapped Process.
    return self._proc.authkey

  @authkey.setter
  def authkey(self, authkey):
    # Process.authkey's own setter wraps the value in AuthenticationString.
    # The original code called AuthenticationString directly without ever
    # importing it, so every assignment raised NameError.
    self._proc.authkey = authkey

  @property
  def exitcode(self):
    return self._proc.exitcode

  @property
  def ident(self):
    return self._proc.ident

  @property
  def pid(self):
    return self._proc.pid

  def __repr__(self):
    return self._proc.__repr__()
 | 
			
		||||
@ -0,0 +1,98 @@
 | 
			
		||||
# Copyright 2016 The Chromium Authors. All rights reserved.
 | 
			
		||||
# Use of this source code is governed by a BSD-style license that can be
 | 
			
		||||
# found in the LICENSE file.
 | 
			
		||||
import math
 | 
			
		||||
import json
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class ParsedTraceEvents(object):
  def __init__(self, events=None, trace_filename=None):
    """Utility class for filtering and manipulating trace data.

    Args:
      events: an iterable of trace event dicts. Mutually exclusive with
          trace_filename.
      trace_filename: path to a file containing a complete JSON trace.

    Raises:
      Exception: if both or neither of events/trace_filename are provided,
          if the trace file does not parse as JSON, or if the resulting
          events object is not iterable.
    """
    if trace_filename and events:
      raise Exception("Provide either a trace file or event list")
    if not trace_filename and events is None:
      raise Exception("Provide either a trace file or event list")

    if trace_filename:
      # `with` guarantees the handle is closed even if read() raises
      # (the original leaked it on error).
      with open(trace_filename, 'r') as f:
        t = f.read()

      # If the event data begins with a [, then we know it should end with a ].
      # The reason we check for this is because some tracing implementations
      # cannot guarantee that a ']' gets written to the trace file. So, we are
      # forgiving and if this is obviously the case, we fix it up before
      # throwing the string at JSON.parse.
      if t[0] == '[':
        n = len(t)
        if t[n - 1] != ']' and t[n - 1] != '\n':
          t = t + ']'
        elif t[n - 2] != ']' and t[n - 1] == '\n':
          t = t + ']'
        elif t[n - 3] != ']' and t[n - 2] == '\r' and t[n - 1] == '\n':
          t = t + ']'

      try:
        events = json.loads(t)
      except ValueError:
        raise Exception("Corrupt trace, did not parse. Value: %s" % t)

      if 'traceEvents' in events:
        events = events['traceEvents']

    if not hasattr(events, '__iter__'):
      # Python-3-compatible raise; the original used the Python-2-only
      # `raise Exception, msg` statement form and misspelled "iterable".
      raise Exception('events must be iterable.')
    self.events = events
    self.pids = None  # lazily-computed cache for findProcessIds()
    self.tids = None  # lazily-computed cache for findThreadIds()

  def __len__(self):
    return len(self.events)

  def __getitem__(self, i):
    return self.events[i]

  def __setitem__(self, i, v):
    self.events[i] = v

  def __repr__(self):
    return "[%s]" % ",\n ".join([repr(e) for e in self.events])

  def findProcessIds(self):
    """Return the list of distinct (truthy) pids present in the trace."""
    if self.pids:
      return self.pids
    pids = set()
    for e in self.events:
      if "pid" in e and e["pid"]:
        pids.add(e["pid"])
    self.pids = list(pids)
    return self.pids

  def findThreadIds(self):
    """Return the list of distinct (truthy) tids present in the trace."""
    if self.tids:
      return self.tids
    tids = set()
    for e in self.events:
      if "tid" in e and e["tid"]:
        tids.add(e["tid"])
    self.tids = list(tids)
    return self.tids

  def findEventsOnProcess(self, pid):
    """Return a new ParsedTraceEvents with only events from process pid."""
    return ParsedTraceEvents([e for e in self.events if e["pid"] == pid])

  def findEventsOnThread(self, tid):
    """Return a new ParsedTraceEvents with non-metadata events on thread tid."""
    return ParsedTraceEvents(
        [e for e in self.events if e["ph"] != "M" and e["tid"] == tid])

  def findByPhase(self, ph):
    """Return a new ParsedTraceEvents with only events of phase ph."""
    return ParsedTraceEvents([e for e in self.events if e["ph"] == ph])

  def findByName(self, n):
    """Return a new ParsedTraceEvents with only events named n."""
    return ParsedTraceEvents([e for e in self.events if e["name"] == n])
 | 
			
		||||
@ -0,0 +1,222 @@
 | 
			
		||||
# Copyright 2019 The Chromium Authors. All rights reserved.
 | 
			
		||||
# Use of this source code is governed by a BSD-style license that can be
 | 
			
		||||
# found in the LICENSE file.
 | 
			
		||||
 | 
			
		||||
""" Classes representing perfetto trace protobuf messages.
 | 
			
		||||
 | 
			
		||||
This module makes use of neither python-protobuf library nor python classes
 | 
			
		||||
compiled from .proto definitions, because currently there's no way to
 | 
			
		||||
deploy those to all the places where telemetry is run.
 | 
			
		||||
 | 
			
		||||
TODO(crbug.com/944078): Remove this module after the python-protobuf library
 | 
			
		||||
is deployed to all the bots.
 | 
			
		||||
 | 
			
		||||
Definitions of perfetto messages can be found here:
 | 
			
		||||
https://android.googlesource.com/platform/external/perfetto/+/refs/heads/master/protos/perfetto/trace/
 | 
			
		||||
"""
 | 
			
		||||
 | 
			
		||||
import encoder
 | 
			
		||||
import wire_format
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class TracePacket(object):
  """Hand-rolled encoder for the root perfetto TracePacket message.

  Each attribute corresponds to an optional field of trace_packet.proto;
  a field left as None is omitted from the encoded output.
  """

  def __init__(self):
    # All fields default to None (= absent on the wire).
    self.interned_data = None
    self.thread_descriptor = None
    self.incremental_state_cleared = None
    self.track_event = None
    self.trusted_packet_sequence_id = None
    self.chrome_benchmark_metadata = None

  def encode(self):
    """Return this packet serialized as protobuf wire-format bytes."""
    parts = []
    # Field 10: trusted_packet_sequence_id (uint32).
    if self.trusted_packet_sequence_id is not None:
      writer = encoder.UInt32Encoder(10, False, False)
      writer(parts.append, self.trusted_packet_sequence_id)
    # Field 11: track_event (length-delimited sub-message).
    if self.track_event is not None:
      tag = encoder.TagBytes(11, wire_format.WIRETYPE_LENGTH_DELIMITED)
      data = self.track_event.encode()
      length = encoder._VarintBytes(len(data))
      parts += [tag, length, data]
    # Field 12: interned_data (length-delimited sub-message).
    if self.interned_data is not None:
      tag = encoder.TagBytes(12, wire_format.WIRETYPE_LENGTH_DELIMITED)
      data = self.interned_data.encode()
      length = encoder._VarintBytes(len(data))
      parts += [tag, length, data]
    # Field 41: incremental_state_cleared (bool).
    if self.incremental_state_cleared is not None:
      writer = encoder.BoolEncoder(41, False, False)
      writer(parts.append, self.incremental_state_cleared)
    # Field 44: thread_descriptor (length-delimited sub-message).
    if self.thread_descriptor is not None:
      tag = encoder.TagBytes(44, wire_format.WIRETYPE_LENGTH_DELIMITED)
      data = self.thread_descriptor.encode()
      length = encoder._VarintBytes(len(data))
      parts += [tag, length, data]
    # Field 48: chrome_benchmark_metadata (length-delimited sub-message).
    if self.chrome_benchmark_metadata is not None:
      tag = encoder.TagBytes(48, wire_format.WIRETYPE_LENGTH_DELIMITED)
      data = self.chrome_benchmark_metadata.encode()
      length = encoder._VarintBytes(len(data))
      parts += [tag, length, data]

    return b"".join(parts)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class InternedData(object):
  """InternedData message: carries newly-interned category / event-name entries."""

  def __init__(self):
    self.event_category = None
    self.legacy_event_name = None

  def encode(self):
    """Return this message serialized as protobuf wire-format bytes."""
    chunks = []
    # Field 1: event_category; field 2: legacy_event_name. Both are
    # length-delimited sub-messages and are emitted in field-number order.
    for field_number, message in ((1, self.event_category),
                                  (2, self.legacy_event_name)):
      if message is None:
        continue
      encoded = message.encode()
      chunks.append(encoder.TagBytes(field_number,
                                     wire_format.WIRETYPE_LENGTH_DELIMITED))
      chunks.append(encoder._VarintBytes(len(encoded)))
      chunks.append(encoded)
    return b"".join(chunks)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class EventCategory(object):
  """EventCategory message: one (iid, name) interning entry.

  Both fields are mandatory; encode() raises if either is unset.
  """

  def __init__(self):
    self.iid = None
    self.name = None

  def encode(self):
    """Return this entry serialized as protobuf wire-format bytes."""
    if self.iid is None or self.name is None:
      raise RuntimeError("Missing mandatory fields.")

    out = []
    # Field 1: iid (uint32); field 2: name (string).
    encoder.UInt32Encoder(1, False, False)(out.append, self.iid)
    encoder.StringEncoder(2, False, False)(out.append, self.name)
    return b"".join(out)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# LegacyEventName has the same wire layout (iid + name), so reuse the class.
LegacyEventName = EventCategory
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class ThreadDescriptor(object):
  """ThreadDescriptor message: identifies a thread and its reference timestamp.

  All three fields are mandatory; encode() raises if any is unset.
  """

  def __init__(self):
    self.pid = None
    self.tid = None
    self.reference_timestamp_us = None

  def encode(self):
    """Return this descriptor serialized as protobuf wire-format bytes."""
    if (self.pid is None or self.tid is None or
        self.reference_timestamp_us is None):
      raise RuntimeError("Missing mandatory fields.")

    out = []
    # Field 1: pid (uint32); field 2: tid (uint32);
    # field 6: reference_timestamp_us (int64).
    for make_writer, field_number, value in (
        (encoder.UInt32Encoder, 1, self.pid),
        (encoder.UInt32Encoder, 2, self.tid),
        (encoder.Int64Encoder, 6, self.reference_timestamp_us)):
      make_writer(field_number, False, False)(out.append, value)
    return b"".join(out)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class TrackEvent(object):
  """TrackEvent message: a single trace event on a thread's packet sequence."""

  def __init__(self):
    # The writer sets either timestamp_delta_us (delta against the
    # sequence's previous event) or timestamp_absolute_us, not both.
    self.timestamp_absolute_us = None
    self.timestamp_delta_us = None
    self.legacy_event = None   # LegacyEvent sub-message
    self.category_iids = None  # list of interned category ids

  def encode(self):
    """Return this event serialized as protobuf wire-format bytes."""
    parts = []
    # Field 1: timestamp_delta_us (int64).
    if self.timestamp_delta_us is not None:
      writer = encoder.Int64Encoder(1, False, False)
      writer(parts.append, self.timestamp_delta_us)
    # Field 3: category_iids (repeated unpacked uint32).
    if self.category_iids is not None:
      writer = encoder.UInt32Encoder(3, is_repeated=True, is_packed=False)
      writer(parts.append, self.category_iids)
    # Field 6: legacy_event (length-delimited sub-message).
    if self.legacy_event is not None:
      tag = encoder.TagBytes(6, wire_format.WIRETYPE_LENGTH_DELIMITED)
      data = self.legacy_event.encode()
      length = encoder._VarintBytes(len(data))
      parts += [tag, length, data]
    # Field 16: timestamp_absolute_us (int64).
    if self.timestamp_absolute_us is not None:
      writer = encoder.Int64Encoder(16, False, False)
      writer(parts.append, self.timestamp_absolute_us)

    return b"".join(parts)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class LegacyEvent(object):
  """LegacyEvent message: phase code plus interned event-name id."""

  def __init__(self):
    self.phase = None     # integer phase code (ord of the phase character)
    self.name_iid = None  # interning id of the event name

  def encode(self):
    """Return this event serialized as protobuf wire-format bytes."""
    out = []
    # Field 1: name_iid (uint32).
    if self.name_iid is not None:
      encoder.UInt32Encoder(1, False, False)(out.append, self.name_iid)
    # Field 2: phase (int32).
    if self.phase is not None:
      encoder.Int32Encoder(2, False, False)(out.append, self.phase)
    return b"".join(out)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class ChromeBenchmarkMetadata(object):
  """ChromeBenchmarkMetadata message: describes the benchmark/story run.

  Every field is optional; None means "absent" and is skipped by encode().
  """

  def __init__(self):
    self.benchmark_start_time_us = None
    self.story_run_time_us = None
    self.benchmark_name = None
    self.benchmark_description = None
    self.story_name = None
    self.story_tags = None
    self.story_run_index = None
    self.label = None
    self.had_failures = None

  def encode(self):
    """Return this metadata serialized as protobuf wire-format bytes."""
    parts = []
    # Field 1: benchmark_start_time_us (int64).
    if self.benchmark_start_time_us is not None:
      writer = encoder.Int64Encoder(1, False, False)
      writer(parts.append, self.benchmark_start_time_us)
    # Field 2: story_run_time_us (int64).
    if self.story_run_time_us is not None:
      writer = encoder.Int64Encoder(2, False, False)
      writer(parts.append, self.story_run_time_us)
    # Field 3: benchmark_name (string).
    if self.benchmark_name is not None:
      writer = encoder.StringEncoder(3, False, False)
      writer(parts.append, self.benchmark_name)
    # Field 4: benchmark_description (string).
    if self.benchmark_description is not None:
      writer = encoder.StringEncoder(4, False, False)
      writer(parts.append, self.benchmark_description)
    # Field 5: label (string).
    if self.label is not None:
      writer = encoder.StringEncoder(5, False, False)
      writer(parts.append, self.label)
    # Field 6: story_name (string).
    if self.story_name is not None:
      writer = encoder.StringEncoder(6, False, False)
      writer(parts.append, self.story_name)
    # Field 7: story_tags (repeated unpacked string).
    if self.story_tags is not None:
      writer = encoder.StringEncoder(7, is_repeated=True, is_packed=False)
      writer(parts.append, self.story_tags)
    # Field 8: story_run_index (int32).
    if self.story_run_index is not None:
      writer = encoder.Int32Encoder(8, False, False)
      writer(parts.append, self.story_run_index)
    # Field 9: had_failures (bool).
    if self.had_failures is not None:
      writer = encoder.BoolEncoder(9, False, False)
      writer(parts.append, self.had_failures)

    return b"".join(parts)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def write_trace_packet(output, trace_packet):
  """Serialize trace_packet as field 1 of the root Trace message into output."""
  output.write(encoder.TagBytes(1, wire_format.WIRETYPE_LENGTH_DELIMITED))
  payload = trace_packet.encode()
  encoder._EncodeVarint(output.write, len(payload))
  output.write(payload)
 | 
			
		||||
 | 
			
		||||
@ -0,0 +1,166 @@
 | 
			
		||||
# Copyright 2019 The Chromium Authors. All rights reserved.
 | 
			
		||||
# Use of this source code is governed by a BSD-style license that can be
 | 
			
		||||
# found in the LICENSE file.
 | 
			
		||||
 | 
			
		||||
""" Functions to write trace data in perfetto protobuf format.
 | 
			
		||||
"""
 | 
			
		||||
 | 
			
		||||
import collections
 | 
			
		||||
 | 
			
		||||
import perfetto_proto_classes as proto
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Dicts of strings for interning.
# Note that each thread has its own interning index.
# Maps: tid -> {string -> interning id (starting from 1)}.
_interned_categories_by_tid = collections.defaultdict(dict)
_interned_event_names_by_tid = collections.defaultdict(dict)

# Trusted sequence ids from telemetry should not overlap with
# trusted sequence ids from other trace producers. Chrome assigns
# sequence ids incrementally starting from 1 and we expect all its ids
# to be well below 10000. Starting from 2^20 will give us enough
# confidence that it will not overlap.
_next_sequence_id = 1<<20
_sequence_ids = {}  # tid -> trusted_packet_sequence_id

# Timestamp of the last event from each thread. Used for delta-encoding
# of timestamps.
_last_timestamps = {}  # tid -> timestamp in microseconds
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _get_sequence_id(tid):
  """Return the stable trusted sequence id for tid, allocating one if new."""
  # Only _next_sequence_id is rebound; mutating the _sequence_ids dict
  # in place needs no `global` declaration.
  global _next_sequence_id
  sequence_id = _sequence_ids.get(tid)
  if sequence_id is None:
    sequence_id = _next_sequence_id
    _sequence_ids[tid] = sequence_id
    _next_sequence_id += 1
  return sequence_id
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _intern_category(category, trace_packet, tid):
  """Return the interning id for category on thread tid.

  When the category is new for this thread it is assigned the next index,
  and the (iid, name) pair is emitted into trace_packet.interned_data as a
  side effect.
  """
  categories = _interned_categories_by_tid[tid]
  iid = categories.get(category)
  if iid is None:
    # Interning indices start from 1.
    iid = len(categories) + 1
    categories[category] = iid
    if trace_packet.interned_data is None:
      trace_packet.interned_data = proto.InternedData()
    entry = proto.EventCategory()
    entry.iid = iid
    entry.name = category
    trace_packet.interned_data.event_category = entry
  return iid
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _intern_event_name(event_name, trace_packet, tid):
  """Return the interning id for event_name on thread tid.

  When the name is new for this thread it is assigned the next index, and
  the (iid, name) pair is emitted into trace_packet.interned_data as a
  side effect.
  """
  event_names = _interned_event_names_by_tid[tid]
  iid = event_names.get(event_name)
  if iid is None:
    # Interning indices start from 1.
    iid = len(event_names) + 1
    event_names[event_name] = iid
    if trace_packet.interned_data is None:
      trace_packet.interned_data = proto.InternedData()
    entry = proto.LegacyEventName()
    entry.iid = iid
    entry.name = event_name
    trace_packet.interned_data.legacy_event_name = entry
  return iid
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def write_thread_descriptor_event(output, pid, tid, ts):
  """ Write the first event in a sequence.

  Call this function before writing any other events.
  Note that this function is NOT thread-safe.

  Args:
    output: a file-like object to write events into.
    pid: process ID.
    tid: thread ID.
    ts: timestamp in microseconds.
  """
  reference_ts_us = int(ts)
  # Seed the per-thread timestamp used for delta-encoding later events.
  _last_timestamps[tid] = reference_ts_us

  packet = proto.TracePacket()
  packet.trusted_packet_sequence_id = _get_sequence_id(tid)
  descriptor = proto.ThreadDescriptor()
  descriptor.pid = pid
  # Thread ID from threading module doesn't fit into int32. But we don't
  # need the exact thread ID, just some number to distinguish one thread
  # from another; the last 31 bits will do for that purpose.
  descriptor.tid = tid & 0x7FFFFFFF
  descriptor.reference_timestamp_us = reference_ts_us
  packet.thread_descriptor = descriptor
  packet.incremental_state_cleared = True

  proto.write_trace_packet(output, packet)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def write_event(output, ph, category, name, ts, args, tid):
  """ Write a trace event.

  Note that this function is NOT thread-safe.

  Args:
    output: a file-like object to write events into.
    ph: phase of event.
    category: category of event.
    name: event name.
    ts: timestamp in microseconds.
    args: this argument is currently ignored.
    tid: thread ID.
  """
  del args  # TODO(khokhlov): Encode args as DebugAnnotations.

  global _last_timestamps
  ts_us = int(ts)
  delta_ts = ts_us - _last_timestamps[tid]

  packet = proto.TracePacket()
  packet.trusted_packet_sequence_id = _get_sequence_id(tid)
  packet.track_event = proto.TrackEvent()

  # Delta-encode the timestamp when the clock did not go backwards;
  # otherwise fall back to an absolute timestamp and leave the stored
  # per-thread "last" value unchanged.
  if delta_ts >= 0:
    packet.track_event.timestamp_delta_us = delta_ts
    _last_timestamps[tid] = ts_us
  else:
    packet.track_event.timestamp_absolute_us = ts_us

  # Interning may also populate packet.interned_data as a side effect.
  packet.track_event.category_iids = [_intern_category(category, packet, tid)]
  legacy_event = proto.LegacyEvent()
  legacy_event.phase = ord(ph)
  legacy_event.name_iid = _intern_event_name(name, packet, tid)
  packet.track_event.legacy_event = legacy_event
  proto.write_trace_packet(output, packet)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def write_metadata(
    output,
    benchmark_start_time_us,
    story_run_time_us,
    benchmark_name,
    benchmark_description,
    story_name,
    story_tags,
    story_run_index,
    label=None,
    had_failures=None,
):
  """Write a ChromeBenchmarkMetadata packet describing this story run.

  Args:
    output: a file-like object to write the packet into.
    label/had_failures: optional; left absent from the message when None.
  """
  metadata = proto.ChromeBenchmarkMetadata()
  # Mandatory fields, normalized to the types the encoder expects.
  for field, value in (
      ('benchmark_start_time_us', int(benchmark_start_time_us)),
      ('story_run_time_us', int(story_run_time_us)),
      ('benchmark_name', benchmark_name),
      ('benchmark_description', benchmark_description),
      ('story_name', story_name),
      ('story_tags', list(story_tags)),
      ('story_run_index', int(story_run_index)),
  ):
    setattr(metadata, field, value)
  # Optional fields stay None (absent) unless explicitly provided.
  if label is not None:
    metadata.label = label
  if had_failures is not None:
    metadata.had_failures = had_failures

  packet = proto.TracePacket()
  packet.chrome_benchmark_metadata = metadata
  proto.write_trace_packet(output, packet)
 | 
			
		||||
 | 
			
		||||
@ -0,0 +1,80 @@
 | 
			
		||||
#!/usr/bin/env python
 | 
			
		||||
# Copyright 2019 The Chromium Authors. All rights reserved.
 | 
			
		||||
# Use of this source code is governed by a BSD-style license that can be
 | 
			
		||||
# found in the LICENSE file.
 | 
			
		||||
 | 
			
		||||
import unittest
 | 
			
		||||
import StringIO
 | 
			
		||||
 | 
			
		||||
from py_trace_event.trace_event_impl import perfetto_trace_writer
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class PerfettoTraceWriterTest(unittest.TestCase):
  """ Tests functions that write perfetto protobufs.

  TODO(crbug.com/944078): Switch to using python-protobuf library
  and implement proper protobuf parsing then.

  Note: expected outputs are the exact serialized wire-format bytes
  (collected in a Python 2 StringIO), so any change to field numbers or
  encoding order in perfetto_trace_writer will change them.
  """

  def testWriteThreadDescriptorEvent(self):
    result = StringIO.StringIO()
    perfetto_trace_writer.write_thread_descriptor_event(
        output=result,
        pid=1,
        tid=2,
        ts=1556716807306000,
    )
    # TracePacket containing a ThreadDescriptor with the pid/tid/timestamp
    # above, plus sequence id and incremental_state_cleared.
    expected_output = (
        '\n\x17P\x80\x80@\xc8\x02\x01\xe2\x02\r\x08\x01\x10'
        '\x020\x90\xf6\xc2\x82\xb6\xfa\xe1\x02'
    )
    self.assertEqual(expected_output, result.getvalue())

  def testWriteTwoEvents(self):
    result = StringIO.StringIO()
    perfetto_trace_writer.write_thread_descriptor_event(
        output=result,
        pid=1,
        tid=2,
        ts=1556716807306000,
    )
    perfetto_trace_writer.write_event(
        output=result,
        ph="M",
        category="category",
        name="event_name",
        ts=1556716807406000,
        args={},
        tid=2,
    )
    # Thread descriptor packet followed by a TrackEvent packet whose
    # interned_data introduces the "category" and "event_name" strings.
    expected_output = (
       '\n\x17P\x80\x80@\xc8\x02\x01\xe2\x02\r\x08\x01\x10'
       '\x020\x90\xf6\xc2\x82\xb6\xfa\xe1\x02\n2P\x80\x80@Z\x0c\x08'
       '\xa0\x8d\x06\x18\x012\x04\x08\x01\x10Mb\x1e\n\x0c\x08\x01'
       '\x12\x08category\x12\x0e\x08\x01\x12\nevent_name'
    )
    self.assertEqual(expected_output, result.getvalue())

  def testWriteMetadata(self):
    result = StringIO.StringIO()
    perfetto_trace_writer.write_metadata(
        output=result,
        benchmark_start_time_us=1556716807306000,
        story_run_time_us=1556716807406000,
        benchmark_name="benchmark",
        benchmark_description="description",
        story_name="story",
        story_tags=["foo", "bar"],
        story_run_index=0,
        label="label",
        had_failures=False,
    )
    # Single TracePacket carrying a fully-populated ChromeBenchmarkMetadata.
    expected_output = (
        '\nI\x82\x03F\x08\x90\xf6\xc2\x82\xb6\xfa\xe1'
        '\x02\x10\xb0\x83\xc9\x82\xb6\xfa\xe1\x02\x1a\tbenchmark"'
        '\x0bdescription*\x05label2\x05story:\x03foo:\x03bar@\x00H\x00'
    )
    self.assertEqual(expected_output, result.getvalue())
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@ -0,0 +1,48 @@
 | 
			
		||||
# Copyright 2016 The Chromium Authors. All rights reserved.
 | 
			
		||||
# Use of this source code is governed by a BSD-style license that can be
 | 
			
		||||
# found in the LICENSE file.
 | 
			
		||||
import unittest
 | 
			
		||||
 | 
			
		||||
#from .log import *
 | 
			
		||||
#from .parsed_trace_events import *
 | 
			
		||||
 | 
			
		||||
from log import *
 | 
			
		||||
from parsed_trace_events import *
 | 
			
		||||
from py_utils import tempfile_ext
 | 
			
		||||
 | 
			
		||||
class TraceTest(unittest.TestCase):
  """Infrastructure for running tests of the tracing system.

  Does not actually run any tests. Look at subclasses for those.
  """

  def __init__(self, *args):
    unittest.TestCase.__init__(self, *args)
    # Open trace-output file handle; non-None only while go() is running.
    self._file = None

  def go(self, cb):
    """Enables tracing, runs the provided callback, and if successful, returns
    a ParsedTraceEvents object with the results.

    Args:
      cb: Zero-argument callable to run while tracing is enabled.

    Returns:
      ParsedTraceEvents parsed from the temporary trace file.
    """
    with tempfile_ext.TemporaryFileName() as filename:
      self._file = open(filename, 'a+')
      try:
        trace_enable(self._file)
        try:
          cb()
        finally:
          trace_disable()
        e = ParsedTraceEvents(trace_filename=self._file.name)
      finally:
        # Close the handle even when cb() or parsing raises; the original
        # code leaked the file (and hit NameError on `return e`) on the
        # exception path.
        self._file.close()
        self._file = None
    return e

  @property
  def trace_filename(self):
    # Path of the currently-open trace file (only valid during go()).
    return self._file.name

  def tearDown(self):
    # Safety net: stop tracing and release the file if a test bailed early.
    if trace_is_enabled():
      trace_disable()
    if self._file:
      self._file.close()
 | 
			
		||||
@ -0,0 +1,518 @@
 | 
			
		||||
#!/usr/bin/env python
 | 
			
		||||
# Copyright 2014 The Chromium Authors. All rights reserved.
 | 
			
		||||
# Use of this source code is governed by a BSD-style license that can be
 | 
			
		||||
# found in the LICENSE file.
 | 
			
		||||
import contextlib
 | 
			
		||||
import json
 | 
			
		||||
import logging
 | 
			
		||||
import math
 | 
			
		||||
import multiprocessing
 | 
			
		||||
import os
 | 
			
		||||
import time
 | 
			
		||||
import unittest
 | 
			
		||||
import sys
 | 
			
		||||
 | 
			
		||||
from py_trace_event import trace_event
 | 
			
		||||
from py_trace_event import trace_time
 | 
			
		||||
from py_trace_event.trace_event_impl import log
 | 
			
		||||
from py_trace_event.trace_event_impl import multiprocessing_shim
 | 
			
		||||
from py_utils import tempfile_ext
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class TraceEventTests(unittest.TestCase):
 | 
			
		||||
 | 
			
		||||
  @contextlib.contextmanager
  def _test_trace(self, disable=True, format=None):
    # Context manager: enables tracing to a fresh temp file whose path is
    # stored in self._log_path, and (unless disable=False) disables tracing
    # on exit.  `format` is forwarded to trace_enable (e.g. trace_event.JSON).
    with tempfile_ext.TemporaryFileName() as filename:
      self._log_path = filename
      try:
        trace_event.trace_enable(self._log_path, format=format)
        yield
      finally:
        if disable:
          trace_event.trace_disable()
 | 
			
		||||
 | 
			
		||||
  def testNoImpl(self):
    # With the implementation module removed, trace_can_enable() must report
    # tracing as unavailable.  Restore the module afterwards.
    orig_impl = trace_event.trace_event_impl
    try:
      trace_event.trace_event_impl = None
      self.assertFalse(trace_event.trace_can_enable())
    finally:
      trace_event.trace_event_impl = orig_impl
 | 
			
		||||
 | 
			
		||||
  def testImpl(self):
    # With the real implementation present, tracing must be enable-able.
    self.assertTrue(trace_event.trace_can_enable())
 | 
			
		||||
 | 
			
		||||
  def testIsEnabledFalse(self):
    # Tracing is off by default.
    self.assertFalse(trace_event.trace_is_enabled())
 | 
			
		||||
 | 
			
		||||
  def testIsEnabledTrue(self):
    # Inside _test_trace() tracing is reported as enabled.
    with self._test_trace():
      self.assertTrue(trace_event.trace_is_enabled())
 | 
			
		||||
 | 
			
		||||
  def testEnable(self):
    # Enabling tracing writes exactly one process_argv metadata ('M') event.
    with self._test_trace():
      with open(self._log_path, 'r') as f:
        # The log is an unterminated JSON array; append ']' to parse it.
        log_output = json.loads(f.read() + ']')
        self.assertEquals(len(log_output), 1)
        self.assertTrue(trace_event.trace_is_enabled())
        log_output = log_output.pop()
        self.assertEquals(log_output['category'], 'process_argv')
        self.assertEquals(log_output['name'], 'process_argv')
        self.assertTrue(log_output['args']['argv'])
        self.assertEquals(log_output['ph'], 'M')
 | 
			
		||||
 | 
			
		||||
  def testDoubleEnable(self):
    """Enabling tracing twice must raise log.TraceException."""
    try:
      with self._test_trace():
        with self._test_trace():
          pass
    except log.TraceException:
      return
    # Use self.fail instead of `assert False`: assert statements are
    # stripped under `python -O`, which would silently pass this test.
    self.fail('Double trace_enable did not raise log.TraceException')
 | 
			
		||||
 | 
			
		||||
  def testDisable(self):
    # Disabling tracing un-shims multiprocessing.Process and makes
    # trace_is_enabled() report False; the metadata event remains in the log.
    _old_multiprocessing_process = multiprocessing.Process
    with self._test_trace(disable=False):
      with open(self._log_path, 'r') as f:
        self.assertTrue(trace_event.trace_is_enabled())
        # While enabled, Process is replaced by the tracing shim.
        self.assertEqual(
            multiprocessing.Process, multiprocessing_shim.ProcessShim)
        trace_event.trace_disable()
        self.assertEqual(
            multiprocessing.Process, _old_multiprocessing_process)
        self.assertEquals(len(json.loads(f.read() + ']')), 1)
        self.assertFalse(trace_event.trace_is_enabled())
 | 
			
		||||
 | 
			
		||||
  def testDoubleDisable(self):
    # A second trace_disable() after tracing already stopped must be a no-op.
    with self._test_trace():
      pass
    trace_event.trace_disable()
 | 
			
		||||
 | 
			
		||||
  def testFlushChanges(self):
    # Events are buffered: the clock_sync event only reaches the log file
    # after trace_flush().
    with self._test_trace():
      with open(self._log_path, 'r') as f:
        trace_event.clock_sync('1')
        self.assertEquals(len(json.loads(f.read() + ']')), 1)
        f.seek(0)
        trace_event.trace_flush()
        self.assertEquals(len(json.loads(f.read() + ']')), 2)
 | 
			
		||||
 | 
			
		||||
  def testFlushNoChanges(self):
    # Flushing with no pending events leaves the log unchanged.
    with self._test_trace():
      with open(self._log_path, 'r') as f:
        self.assertEquals(len(json.loads(f.read() + ']')),1)
        f.seek(0)
        trace_event.trace_flush()
        self.assertEquals(len(json.loads(f.read() + ']')), 1)
 | 
			
		||||
 | 
			
		||||
  def testDoubleFlush(self):
    # A second consecutive flush must not duplicate events.
    with self._test_trace():
      with open(self._log_path, 'r') as f:
        trace_event.clock_sync('1')
        self.assertEquals(len(json.loads(f.read() + ']')), 1)
        f.seek(0)
        trace_event.trace_flush()
        trace_event.trace_flush()
        self.assertEquals(len(json.loads(f.read() + ']')), 2)
 | 
			
		||||
 | 
			
		||||
  def testTraceBegin(self):
    # trace_begin() emits a 'B' event after the process_argv metadata event.
    with self._test_trace():
      with open(self._log_path, 'r') as f:
        trace_event.trace_begin('test_event', this='that')
        trace_event.trace_flush()
        log_output = json.loads(f.read() + ']')
        self.assertEquals(len(log_output), 2)
        current_entry = log_output.pop(0)
        self.assertEquals(current_entry['category'], 'process_argv')
        self.assertEquals(current_entry['name'], 'process_argv')
        self.assertTrue( current_entry['args']['argv'])
        self.assertEquals( current_entry['ph'], 'M')
        current_entry = log_output.pop(0)
        self.assertEquals(current_entry['category'], 'python')
        self.assertEquals(current_entry['name'], 'test_event')
        # Keyword-argument values are recorded as their repr(), hence the
        # quoted string.
        self.assertEquals(current_entry['args']['this'], '\'that\'')
        self.assertEquals(current_entry['ph'], 'B')
 | 
			
		||||
 | 
			
		||||
  def testTraceEnd(self):
    # trace_end() emits an 'E' event with empty args.
    with self._test_trace():
      with open(self._log_path, 'r') as f:
        trace_event.trace_end('test_event')
        trace_event.trace_flush()
        log_output = json.loads(f.read() + ']')
        self.assertEquals(len(log_output), 2)
        current_entry = log_output.pop(0)
        self.assertEquals(current_entry['category'], 'process_argv')
        self.assertEquals(current_entry['name'], 'process_argv')
        self.assertTrue(current_entry['args']['argv'])
        self.assertEquals(current_entry['ph'], 'M')
        current_entry = log_output.pop(0)
        self.assertEquals(current_entry['category'], 'python')
        self.assertEquals(current_entry['name'], 'test_event')
        self.assertEquals(current_entry['args'], {})
        self.assertEquals(current_entry['ph'], 'E')
 | 
			
		||||
 | 
			
		||||
  def testTrace(self):
    # Records one event through the trace() context manager and verifies the
    # resulting metadata, begin, and end entries in order.
    with self._test_trace():
      with trace_event.trace('test_event', this='that'):
        pass
      trace_event.trace_flush()
      with open(self._log_path, 'r') as f:
        events = json.loads(f.read() + ']')
      self.assertEquals(len(events), 3)
      meta, begin, end = events
      self.assertEquals(meta['category'], 'process_argv')
      self.assertEquals(meta['name'], 'process_argv')
      self.assertTrue(meta['args']['argv'])
      self.assertEquals(meta['ph'], 'M')
      self.assertEquals(begin['category'], 'python')
      self.assertEquals(begin['name'], 'test_event')
      self.assertEquals(begin['args']['this'], '\'that\'')
      self.assertEquals(begin['ph'], 'B')
      self.assertEquals(end['category'], 'python')
      self.assertEquals(end['name'], 'test_event')
      self.assertEquals(end['args'], {})
      self.assertEquals(end['ph'], 'E')
 | 
			
		||||
 | 
			
		||||
  def testTracedDecorator(self):
    # @traced records B/E events named after the decorated function's
    # module-qualified name, capturing the listed argument ("this").
    @trace_event.traced("this")
    def test_decorator(this="that"):
      pass

    with self._test_trace():
      test_decorator()
      trace_event.trace_flush()
      with open(self._log_path, 'r') as f:
        log_output = json.loads(f.read() + ']')
        self.assertEquals(len(log_output), 3)
        expected_name = __name__ + '.test_decorator'
        current_entry = log_output.pop(0)
        self.assertEquals(current_entry['category'], 'process_argv')
        self.assertEquals(current_entry['name'], 'process_argv')
        self.assertTrue(current_entry['args']['argv'])
        self.assertEquals(current_entry['ph'], 'M')
        current_entry = log_output.pop(0)
        self.assertEquals(current_entry['category'], 'python')
        self.assertEquals(current_entry['name'], expected_name)
        # Captured argument values are stringified with repr().
        self.assertEquals(current_entry['args']['this'], '\'that\'')
        self.assertEquals(current_entry['ph'], 'B')
        current_entry = log_output.pop(0)
        self.assertEquals(current_entry['category'], 'python')
        self.assertEquals(current_entry['name'], expected_name)
        self.assertEquals(current_entry['args'], {})
        self.assertEquals(current_entry['ph'], 'E')
 | 
			
		||||
 | 
			
		||||
  def testClockSyncWithTs(self):
    # clock_sync() with an explicit issue_ts records a 'c' event that carries
    # the timestamp in its args.
    with self._test_trace():
      with open(self._log_path, 'r') as f:
        trace_event.clock_sync('id', issue_ts=trace_time.Now())
        trace_event.trace_flush()
        log_output = json.loads(f.read() + ']')
        self.assertEquals(len(log_output), 2)
        current_entry = log_output.pop(0)
        self.assertEquals(current_entry['category'], 'process_argv')
        self.assertEquals(current_entry['name'], 'process_argv')
        self.assertTrue(current_entry['args']['argv'])
        self.assertEquals(current_entry['ph'], 'M')
        current_entry = log_output.pop(0)
        self.assertEquals(current_entry['category'], 'python')
        self.assertEquals(current_entry['name'], 'clock_sync')
        self.assertTrue(current_entry['args']['issue_ts'])
        self.assertEquals(current_entry['ph'], 'c')
 | 
			
		||||
 | 
			
		||||
  def testClockSyncWithoutTs(self):
    # clock_sync() without issue_ts records a 'c' event with no issue_ts arg.
    with self._test_trace():
      with open(self._log_path, 'r') as f:
        trace_event.clock_sync('id')
        trace_event.trace_flush()
        log_output = json.loads(f.read() + ']')
        self.assertEquals(len(log_output), 2)
        current_entry = log_output.pop(0)
        self.assertEquals(current_entry['category'], 'process_argv')
        self.assertEquals(current_entry['name'], 'process_argv')
        self.assertTrue(current_entry['args']['argv'])
        self.assertEquals(current_entry['ph'], 'M')
        current_entry = log_output.pop(0)
        self.assertEquals(current_entry['category'], 'python')
        self.assertEquals(current_entry['name'], 'clock_sync')
        self.assertFalse(current_entry['args'].get('issue_ts'))
        self.assertEquals(current_entry['ph'], 'c')
 | 
			
		||||
 | 
			
		||||
  def testTime(self):
    """Checks that event timestamps track wall-clock time.

    The gap between the B and E event timestamps (microseconds) must agree
    with the wall-clock interval measured around the corresponding calls to
    within 1 ms.
    """
    # NOTE(review): removed an unused inner helper (func1) and its actual_diff
    # accumulator — it was never called, and it referenced undefined names
    # (bare trace_begin/trace_end), so it was pure dead code.
    with self._test_trace():
      start_ts = time.time()
      trace_event.trace_begin('test')
      end_ts = time.time()
      trace_event.trace_end('test')
      trace_event.trace_flush()
      with open(self._log_path, 'r') as f:
        log_output = json.loads(f.read() + ']')
        self.assertEquals(len(log_output), 3)
        meta_data = log_output[0]
        open_data = log_output[1]
        close_data = log_output[2]
        self.assertEquals(meta_data['category'], 'process_argv')
        self.assertEquals(meta_data['name'], 'process_argv')
        self.assertTrue(meta_data['args']['argv'])
        self.assertEquals(meta_data['ph'], 'M')
        self.assertEquals(open_data['category'], 'python')
        self.assertEquals(open_data['name'], 'test')
        self.assertEquals(open_data['ph'], 'B')
        self.assertEquals(close_data['category'], 'python')
        self.assertEquals(close_data['name'], 'test')
        self.assertEquals(close_data['ph'], 'E')
        # Compare the event-timestamp delta with the measured wall-clock
        # delta, both in microseconds; allow 1000us of slack.
        event_time_diff = close_data['ts'] - open_data['ts']
        recorded_time_diff = (end_ts - start_ts) * 1000000
        self.assertLess(math.fabs(event_time_diff - recorded_time_diff), 1000)
 | 
			
		||||
 | 
			
		||||
  def testNestedCalls(self):
    # Nested begin/end pairs appear in call order, and the outer event's
    # timestamps enclose the inner event's.
    with self._test_trace():
      trace_event.trace_begin('one')
      trace_event.trace_begin('two')
      trace_event.trace_end('two')
      trace_event.trace_end('one')
      trace_event.trace_flush()
      with open(self._log_path, 'r') as f:
        log_output = json.loads(f.read() + ']')
        self.assertEquals(len(log_output), 5)
        meta_data = log_output[0]
        one_open = log_output[1]
        two_open = log_output[2]
        two_close = log_output[3]
        one_close = log_output[4]
        self.assertEquals(meta_data['category'], 'process_argv')
        self.assertEquals(meta_data['name'], 'process_argv')
        self.assertTrue(meta_data['args']['argv'])
        self.assertEquals(meta_data['ph'], 'M')

        self.assertEquals(one_open['category'], 'python')
        self.assertEquals(one_open['name'], 'one')
        self.assertEquals(one_open['ph'], 'B')
        self.assertEquals(one_close['category'], 'python')
        self.assertEquals(one_close['name'], 'one')
        self.assertEquals(one_close['ph'], 'E')

        self.assertEquals(two_open['category'], 'python')
        self.assertEquals(two_open['name'], 'two')
        self.assertEquals(two_open['ph'], 'B')
        self.assertEquals(two_close['category'], 'python')
        self.assertEquals(two_close['name'], 'two')
        self.assertEquals(two_close['ph'], 'E')

        # Outer event brackets the inner one.
        self.assertLessEqual(one_open['ts'], two_open['ts'])
        self.assertGreaterEqual(one_close['ts'], two_close['ts'])
 | 
			
		||||
 | 
			
		||||
  def testInterleavedCalls(self):
    # Interleaved (non-nested) begin/end pairs: one ends before two, so the
    # log order is one_open, two_open, one_close, two_close.
    with self._test_trace():
      trace_event.trace_begin('one')
      trace_event.trace_begin('two')
      trace_event.trace_end('one')
      trace_event.trace_end('two')
      trace_event.trace_flush()
      with open(self._log_path, 'r') as f:
        log_output = json.loads(f.read() + ']')
        self.assertEquals(len(log_output), 5)
        meta_data = log_output[0]
        one_open = log_output[1]
        two_open = log_output[2]
        # Ends appear in call order: one_close at index 3, two_close at 4.
        two_close = log_output[4]
        one_close = log_output[3]
        self.assertEquals(meta_data['category'], 'process_argv')
        self.assertEquals(meta_data['name'], 'process_argv')
        self.assertTrue(meta_data['args']['argv'])
        self.assertEquals(meta_data['ph'], 'M')

        self.assertEquals(one_open['category'], 'python')
        self.assertEquals(one_open['name'], 'one')
        self.assertEquals(one_open['ph'], 'B')
        self.assertEquals(one_close['category'], 'python')
        self.assertEquals(one_close['name'], 'one')
        self.assertEquals(one_close['ph'], 'E')

        self.assertEquals(two_open['category'], 'python')
        self.assertEquals(two_open['name'], 'two')
        self.assertEquals(two_open['ph'], 'B')
        self.assertEquals(two_close['category'], 'python')
        self.assertEquals(two_close['name'], 'two')
        self.assertEquals(two_close['ph'], 'E')

        self.assertLessEqual(one_open['ts'], two_open['ts'])
        self.assertLessEqual(one_close['ts'], two_close['ts'])
 | 
			
		||||
 | 
			
		||||
  # TODO(khokhlov): Fix this test on Windows. See crbug.com/945819 for details.
  def disabled_testMultiprocess(self):
    # Child-process events recorded through the multiprocessing shim must end
    # up in the parent's trace log, nested inside the parent event.
    # (Renamed with the disabled_ prefix so the runner skips it; see TODO.)
    def child_function():
      with trace_event.trace('child_event'):
        pass

    with self._test_trace():
      trace_event.trace_begin('parent_event')
      trace_event.trace_flush()
      p = multiprocessing.Process(target=child_function)
      p.start()
      # The shim marks processes it has wrapped.
      self.assertTrue(hasattr(p, "_shimmed_by_trace_event"))
      p.join()
      trace_event.trace_end('parent_event')
      trace_event.trace_flush()
      with open(self._log_path, 'r') as f:
        log_output = json.loads(f.read() + ']')
        self.assertEquals(len(log_output), 5)
        meta_data = log_output[0]
        parent_open = log_output[1]
        child_open = log_output[2]
        child_close = log_output[3]
        parent_close = log_output[4]
        self.assertEquals(meta_data['category'], 'process_argv')
        self.assertEquals(meta_data['name'], 'process_argv')
        self.assertTrue(meta_data['args']['argv'])
        self.assertEquals(meta_data['ph'], 'M')

        self.assertEquals(parent_open['category'], 'python')
        self.assertEquals(parent_open['name'], 'parent_event')
        self.assertEquals(parent_open['ph'], 'B')

        self.assertEquals(child_open['category'], 'python')
        self.assertEquals(child_open['name'], 'child_event')
        self.assertEquals(child_open['ph'], 'B')

        self.assertEquals(child_close['category'], 'python')
        self.assertEquals(child_close['name'], 'child_event')
        self.assertEquals(child_close['ph'], 'E')

        self.assertEquals(parent_close['category'], 'python')
        self.assertEquals(parent_close['name'], 'parent_event')
        self.assertEquals(parent_close['ph'], 'E')
 | 
			
		||||
 | 
			
		||||
  @unittest.skipIf(sys.platform == 'win32', 'crbug.com/945819')
  def testTracingControlDisabledInChildButNotInParent(self):
    # Only the process that enabled tracing may control it; children inherit
    # tracing but must report it as not controllable.
    def child(resp):
      # test tracing is not controllable in the child
      resp.put(trace_event.is_tracing_controllable())

    with self._test_trace():
      q = multiprocessing.Queue()
      p = multiprocessing.Process(target=child, args=[q])
      p.start()
      # test tracing is controllable in the parent
      self.assertTrue(trace_event.is_tracing_controllable())
      self.assertFalse(q.get())
      p.join()
 | 
			
		||||
 | 
			
		||||
  def testMultiprocessExceptionInChild(self):
    # A pool child that illegally calls trace_disable() raises, but the
    # parent's own B/E events must still be written intact.
    def bad_child():
      trace_event.trace_disable()

    with self._test_trace():
      p = multiprocessing.Pool(1)
      trace_event.trace_begin('parent')
      self.assertRaises(Exception, lambda: p.apply(bad_child, ()))
      p.close()
      p.terminate()
      p.join()
      trace_event.trace_end('parent')
      trace_event.trace_flush()
      with open(self._log_path, 'r') as f:
        log_output = json.loads(f.read() + ']')
        self.assertEquals(len(log_output), 3)
        meta_data = log_output[0]  # process_argv metadata; not asserted here
        parent_open = log_output[1]
        parent_close = log_output[2]
        self.assertEquals(parent_open['category'], 'python')
        self.assertEquals(parent_open['name'], 'parent')
        self.assertEquals(parent_open['ph'], 'B')
        self.assertEquals(parent_close['category'], 'python')
        self.assertEquals(parent_close['name'], 'parent')
        self.assertEquals(parent_close['ph'], 'E')
 | 
			
		||||
 | 
			
		||||
  def testFormatJson(self):
    # JSON format: the log is an unterminated array body, so the reader must
    # append the closing ']'.
    with self._test_trace(format=trace_event.JSON):
      trace_event.trace_flush()
      with open(self._log_path, 'r') as f:
        log_output = json.loads(f.read() + ']')
    self.assertEquals(len(log_output), 1)
    self.assertEquals(log_output[0]['ph'], 'M')
 | 
			
		||||
 | 
			
		||||
  def testFormatJsonWithMetadata(self):
    # JSON_WITH_METADATA format: a complete JSON object (traceEvents +
    # metadata) once tracing is disabled; parseable without fix-ups.
    with self._test_trace(format=trace_event.JSON_WITH_METADATA):
      trace_event.trace_disable()
      with open(self._log_path, 'r') as f:
        log_output = json.load(f)
    self.assertEquals(len(log_output), 2)
    events = log_output['traceEvents']
    self.assertEquals(len(events), 1)
    self.assertEquals(events[0]['ph'], 'M')
 | 
			
		||||
 | 
			
		||||
  def testFormatProtobuf(self):
    # PROTOBUF format: only verify flushing produced some bytes; the binary
    # content itself is covered by the perfetto writer tests.
    with self._test_trace(format=trace_event.PROTOBUF):
      trace_event.trace_flush()
      with open(self._log_path, 'r') as f:
        self.assertGreater(len(f.read()), 0)
 | 
			
		||||
 | 
			
		||||
  def testAddMetadata(self):
    # Benchmark metadata lands under metadata['telemetry'] with list-valued
    # fields.
    with self._test_trace(format=trace_event.JSON_WITH_METADATA):
      trace_event.trace_add_benchmark_metadata(
          benchmark_start_time_us=1000,
          story_run_time_us=2000,
          benchmark_name='benchmark',
          benchmark_description='desc',
          story_name='story',
          story_tags=['tag1', 'tag2'],
          story_run_index=0,
      )
      trace_event.trace_disable()
      with open(self._log_path, 'r') as f:
        log_output = json.load(f)
    self.assertEquals(len(log_output), 2)
    telemetry_metadata = log_output['metadata']['telemetry']
    self.assertEquals(len(telemetry_metadata), 7)
    # 1000us/2000us come back as 1/2 — presumably converted to milliseconds
    # by the writer; confirm against trace_event's implementation.
    self.assertEquals(telemetry_metadata['benchmarkStart'], 1)
    self.assertEquals(telemetry_metadata['traceStart'], 2)
    self.assertEquals(telemetry_metadata['benchmarks'], ['benchmark'])
    self.assertEquals(telemetry_metadata['benchmarkDescriptions'], ['desc'])
    self.assertEquals(telemetry_metadata['stories'], ['story'])
    self.assertEquals(telemetry_metadata['storyTags'], ['tag1', 'tag2'])
    self.assertEquals(telemetry_metadata['storysetRepeats'], [0])
 | 
			
		||||
 | 
			
		||||
  def testAddMetadataProtobuf(self):
    # Same metadata call in PROTOBUF format; only check the output file is
    # non-empty.
    with self._test_trace(format=trace_event.PROTOBUF):
      trace_event.trace_add_benchmark_metadata(
          benchmark_start_time_us=1000,
          story_run_time_us=2000,
          benchmark_name='benchmark',
          benchmark_description='desc',
          story_name='story',
          story_tags=['tag1', 'tag2'],
          story_run_index=0,
      )
      trace_event.trace_disable()
      with open(self._log_path, 'r') as f:
        self.assertGreater(len(f.read()), 0)
 | 
			
		||||
 | 
			
		||||
  def testAddMetadataInJsonFormatRaises(self):
    # Plain JSON format has no metadata section, so adding benchmark
    # metadata must raise log.TraceException.
    with self._test_trace(format=trace_event.JSON):
      with self.assertRaises(log.TraceException):
        trace_event.trace_add_benchmark_metadata(
            benchmark_start_time_us=1000,
            story_run_time_us=2000,
            benchmark_name='benchmark',
            benchmark_description='description',
            story_name='story',
            story_tags=['tag1', 'tag2'],
            story_run_index=0,
        )
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
if __name__ == '__main__':
  # Run with debug logging and verbose test output when invoked directly.
  logging.getLogger().setLevel(logging.DEBUG)
  unittest.main(verbosity=2)
 | 
			
		||||
@ -0,0 +1,234 @@
 | 
			
		||||
# Copyright 2016 The Chromium Authors. All rights reserved.
 | 
			
		||||
# Use of this source code is governed by a BSD-style license that can be
 | 
			
		||||
# found in the LICENSE file.
 | 
			
		||||
 | 
			
		||||
import ctypes
 | 
			
		||||
import ctypes.util
 | 
			
		||||
import logging
 | 
			
		||||
import os
 | 
			
		||||
import platform
 | 
			
		||||
import sys
 | 
			
		||||
import time
 | 
			
		||||
import threading
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Last value seen from the low-resolution Windows tick clock; used below for
# rollover detection (the Windows code path is not shown in this chunk).
GET_TICK_COUNT_LAST_NOW = 0
# If GET_TICK_COUNT_LAST_NOW is less than the current time, the clock has
# rolled over, and this needs to be accounted for.
GET_TICK_COUNT_WRAPAROUNDS = 0
# The current detected platform
_CLOCK = None
_NOW_FUNCTION = None
# Mapping of supported platforms and what is returned by sys.platform.
_PLATFORMS = {
    'mac': 'darwin',
    'linux': 'linux',
    'windows': 'win32',
    'cygwin': 'cygwin',
    'freebsd': 'freebsd',
    'sunos': 'sunos5',
    'bsd': 'bsd'
}
# Mapping of what to pass get_clocktime based on platform.
_CLOCK_MONOTONIC = {
    'linux': 1,
    'freebsd': 4,
    'bsd': 3,
    'sunos5': 4
}

# Identifiers naming which clock source was selected (stored in _CLOCK).
_LINUX_CLOCK = 'LINUX_CLOCK_MONOTONIC'
_MAC_CLOCK = 'MAC_MACH_ABSOLUTE_TIME'
_WIN_HIRES = 'WIN_QPC'
_WIN_LORES = 'WIN_ROLLOVER_PROTECTED_TIME_GET_TIME'
 | 
			
		||||
 | 
			
		||||
def InitializeMacNowFunction(plat):
  """Sets a monotonic clock for the Mac platform.

    Args:
      plat: Platform that is being run on. Unused in
        InitializeMacNowFunction. Passed for consistency between
        initializers.
  """
  del plat  # Unused
  global _CLOCK  # pylint: disable=global-statement
  global _NOW_FUNCTION  # pylint: disable=global-statement
  _CLOCK = _MAC_CLOCK
  libc = ctypes.CDLL('/usr/lib/libc.dylib', use_errno=True)
  class MachTimebaseInfoData(ctypes.Structure):
    """System timebase info. Defined in <mach/mach_time.h>."""
    _fields_ = (('numer', ctypes.c_uint32),
                ('denom', ctypes.c_uint32))

  mach_absolute_time = libc.mach_absolute_time
  # mach_absolute_time() returns an unsigned 64-bit tick count.
  mach_absolute_time.restype = ctypes.c_uint64

  timebase = MachTimebaseInfoData()
  libc.mach_timebase_info(ctypes.byref(timebase))
  # numer/denom converts ticks to nanoseconds; scaling by 1e9 yields
  # ticks per second so the now-function can return seconds directly.
  ticks_per_second = timebase.numer / timebase.denom * 1.0e9

  def MacNowFunctionImpl():
    # Monotonic time, in seconds.
    return mach_absolute_time() / ticks_per_second
  _NOW_FUNCTION = MacNowFunctionImpl
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def GetClockGetTimeClockNumber(plat):
  """Returns the CLOCK_MONOTONIC id to pass to clock_gettime for |plat|.

  Args:
    plat: Platform name, as returned by sys.platform.

  Returns:
    The integer clock id from _CLOCK_MONOTONIC whose key is a prefix of
    |plat|.

  Raises:
    LookupError: If no entry in _CLOCK_MONOTONIC matches |plat|.
  """
  for key in _CLOCK_MONOTONIC:
    if plat.startswith(key):
      return _CLOCK_MONOTONIC[key]
  # Fixed typo in the error message ('dicitonary' -> 'dictionary').
  raise LookupError('Platform not in clock dictionary')
 | 
			
		||||
 | 
			
		||||
def InitializeLinuxNowFunction(plat):
  """Sets a monotonic clock for linux platforms.

    Args:
      plat: Platform that is being run on.
  """
  global _CLOCK  # pylint: disable=global-statement
  global _NOW_FUNCTION  # pylint: disable=global-statement
  _CLOCK = _LINUX_CLOCK
  # Per-platform clock id for clock_gettime (raises LookupError if unknown).
  clock_monotonic = GetClockGetTimeClockNumber(plat)
  try:
    # Attempt to find clock_gettime in the C library.
    clock_gettime = ctypes.CDLL(ctypes.util.find_library('c'),
                                use_errno=True).clock_gettime
  except AttributeError:
    # If not able to find int in the C library, look in rt library.
    clock_gettime = ctypes.CDLL(ctypes.util.find_library('rt'),
                                use_errno=True).clock_gettime

  class Timespec(ctypes.Structure):
    """Time specification, as described in clock_gettime(3)."""
    _fields_ = (('tv_sec', ctypes.c_long),
                ('tv_nsec', ctypes.c_long))

  def LinuxNowFunctionImpl():
    # Monotonic time, in seconds. Raises OSError if clock_gettime fails
    # (it returns non-zero on failure and sets errno).
    ts = Timespec()
    if clock_gettime(clock_monotonic, ctypes.pointer(ts)):
      errno = ctypes.get_errno()
      raise OSError(errno, os.strerror(errno))
    return ts.tv_sec + ts.tv_nsec / 1.0e9

  _NOW_FUNCTION = LinuxNowFunctionImpl
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def IsQPCUsable():
  """Determines if system can query the performance counter.

    The performance counter is a high resolution timer on windows systems.
    Some chipsets have unreliable performance counters, so this checks that one
    of those chipsets is not present.

    Returns:
      True if QPC is useable, false otherwise.
  """

  # Sample output: 'Intel64 Family 6 Model 23 Stepping 6, GenuineIntel'
  processor_info = platform.processor()
  if 'AuthenticAMD' in processor_info and 'Family 15' in processor_info:
    # Known-unreliable AMD chipset.
    return False
  if not hasattr(ctypes, 'windll'):
    # Not running on Windows, so QPC is unavailable.
    return False
  try:  # If anything goes wrong during this, assume QPC isn't available.
    frequency = ctypes.c_int64()
    ctypes.windll.Kernel32.QueryPerformanceFrequency(
        ctypes.byref(frequency))
    # A non-positive frequency means the counter cannot be used.
    return float(frequency.value) > 0
  except Exception:  # pylint: disable=broad-except
    logging.exception('Error when determining if QPC is usable.')
    return False
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def InitializeWinNowFunction(plat):
  """Sets a monotonic clock for windows platforms.

    Args:
      plat: Platform that is being run on.
  """
  global _CLOCK  # pylint: disable=global-statement
  global _NOW_FUNCTION  # pylint: disable=global-statement

  if IsQPCUsable():
    # High-resolution path: QueryPerformanceCounter scaled by its frequency.
    _CLOCK = _WIN_HIRES
    qpc_return = ctypes.c_int64()
    qpc_frequency = ctypes.c_int64()
    ctypes.windll.Kernel32.QueryPerformanceFrequency(
        ctypes.byref(qpc_frequency))
    qpc_frequency = float(qpc_frequency.value)
    qpc = ctypes.windll.Kernel32.QueryPerformanceCounter

    def WinNowFunctionImpl():
      # Monotonic time, in seconds (counter value / counts per second).
      qpc(ctypes.byref(qpc_return))
      return qpc_return.value / qpc_frequency

  else:
    # Low-resolution fallback based on GetTickCount / GetTickCount64.
    _CLOCK = _WIN_LORES
    # Cygwin loads kernel32 via cdecl, native Windows via stdcall.
    kernel32 = (ctypes.cdll.kernel32
                if plat.startswith(_PLATFORMS['cygwin'])
                else ctypes.windll.kernel32)
    get_tick_count_64 = getattr(kernel32, 'GetTickCount64', None)

    # Windows Vista or newer
    if get_tick_count_64:
      get_tick_count_64.restype = ctypes.c_ulonglong

      def WinNowFunctionImpl():
        # GetTickCount64 returns milliseconds; convert to seconds.
        return get_tick_count_64() / 1000.0

    else:  # Pre Vista.
      get_tick_count = kernel32.GetTickCount
      get_tick_count.restype = ctypes.c_uint32
      # Serializes access to the module-level rollover bookkeeping below.
      get_tick_count_lock = threading.Lock()

      def WinNowFunctionImpl():
        global GET_TICK_COUNT_LAST_NOW  # pylint: disable=global-statement
        global GET_TICK_COUNT_WRAPAROUNDS  # pylint: disable=global-statement
        with get_tick_count_lock:
          current_sample = get_tick_count()
          # GetTickCount is a 32-bit millisecond counter; a sample smaller
          # than the previous one means the counter wrapped around.
          if current_sample < GET_TICK_COUNT_LAST_NOW:
            GET_TICK_COUNT_WRAPAROUNDS += 1
          GET_TICK_COUNT_LAST_NOW = current_sample
          # Reconstruct a 64-bit millisecond value from the wrap count and
          # the latest 32-bit sample, then convert to seconds.
          final_ms = GET_TICK_COUNT_WRAPAROUNDS << 32
          final_ms += GET_TICK_COUNT_LAST_NOW
          return final_ms / 1000.0

  _NOW_FUNCTION = WinNowFunctionImpl
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def InitializeNowFunction(plat):
  """Sets a monotonic clock for the current platform.

    Args:
      plat: Platform that is being run on.
  """
  # Dispatch to the platform-specific initializer; startswith accepts a
  # tuple of prefixes, so the POSIX-like platforms share one branch.
  if plat.startswith(_PLATFORMS['mac']):
    InitializeMacNowFunction(plat)
  elif plat.startswith((_PLATFORMS['linux'],
                        _PLATFORMS['freebsd'],
                        _PLATFORMS['bsd'],
                        _PLATFORMS['sunos'])):
    InitializeLinuxNowFunction(plat)
  elif plat.startswith((_PLATFORMS['windows'], _PLATFORMS['cygwin'])):
    InitializeWinNowFunction(plat)
  else:
    raise RuntimeError('%s is not a supported platform.' % plat)

  # Every initializer must have installed both module-level values.
  global _NOW_FUNCTION  # pylint: disable=global-statement
  global _CLOCK  # pylint: disable=global-statement
  assert _NOW_FUNCTION, 'Now function not properly set during initialization.'
  assert _CLOCK, 'Clock not properly set during initialization.'
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def Now():
  """Returns the current monotonic time, in microseconds."""
  seconds = _NOW_FUNCTION()
  return seconds * 1e6  # Convert from seconds to microseconds.
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def GetClock():
  """Returns the name of the clock selected at initialization time."""
  return _CLOCK
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Select the appropriate monotonic clock for this host at import time.
InitializeNowFunction(sys.platform)
 | 
			
		||||
@ -0,0 +1,123 @@
 | 
			
		||||
# Copyright 2016 The Chromium Authors. All rights reserved.
 | 
			
		||||
# Use of this source code is governed by a BSD-style license that can be
 | 
			
		||||
# found in the LICENSE file.
 | 
			
		||||
 | 
			
		||||
import contextlib
 | 
			
		||||
import logging
 | 
			
		||||
import platform
 | 
			
		||||
import sys
 | 
			
		||||
import unittest
 | 
			
		||||
 | 
			
		||||
from py_trace_event import trace_time
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class TimerTest(unittest.TestCase):
  """Tests for trace_time's clock selection and monotonic behavior.

  Platform-specific tests return early (without failing) when run on a
  platform they do not apply to.
  """

  # Helper methods.
  @contextlib.contextmanager
  def ReplacePlatformProcessorCall(self, f):
    """Temporarily replaces platform.processor with |f|."""
    try:
      old_proc = platform.processor
      platform.processor = f
      yield
    finally:
      platform.processor = old_proc

  @contextlib.contextmanager
  def ReplaceQPCCheck(self, f):
    """Temporarily replaces trace_time.IsQPCUsable with |f|."""
    try:
      old_qpc = trace_time.IsQPCUsable
      trace_time.IsQPCUsable = f
      yield
    finally:
      trace_time.IsQPCUsable = old_qpc

  # Platform detection tests.
  def testInitializeNowFunction_platformNotSupported(self):
    with self.assertRaises(RuntimeError):
      trace_time.InitializeNowFunction('invalid_platform')

  def testInitializeNowFunction_windows(self):
    if not (sys.platform.startswith(trace_time._PLATFORMS['windows'])
            or sys.platform.startswith(trace_time._PLATFORMS['cygwin'])):
      return True
    trace_time.InitializeNowFunction(sys.platform)
    self.assertTrue(trace_time.GetClock() == trace_time._WIN_HIRES
                    or trace_time.GetClock() == trace_time._WIN_LORES)

  def testInitializeNowFunction_linux(self):
    if not sys.platform.startswith(trace_time._PLATFORMS['linux']):
      return True
    trace_time.InitializeNowFunction(sys.platform)
    self.assertEqual(trace_time.GetClock(), trace_time._LINUX_CLOCK)

  def testInitializeNowFunction_mac(self):
    if not sys.platform.startswith(trace_time._PLATFORMS['mac']):
      return True
    trace_time.InitializeNowFunction(sys.platform)
    self.assertEqual(trace_time.GetClock(), trace_time._MAC_CLOCK)

  # Windows Tests
  def testIsQPCUsable_buggyAthlonProcReturnsFalse(self):
    if not (sys.platform.startswith(trace_time._PLATFORMS['windows'])
            or sys.platform.startswith(trace_time._PLATFORMS['cygwin'])):
      return True

    def BuggyAthlonProc():
      return 'AMD64 Family 15 Model 23 Stepping 6, AuthenticAMD'

    with self.ReplacePlatformProcessorCall(BuggyAthlonProc):
      self.assertFalse(trace_time.IsQPCUsable())

  def testIsQPCUsable_returnsTrueOnWindows(self):
    if not (sys.platform.startswith(trace_time._PLATFORMS['windows'])
            or sys.platform.startswith(trace_time._PLATFORMS['cygwin'])):
      return True

    def Proc():
      return 'Intel64 Family 15 Model 23 Stepping 6, GenuineIntel'

    with self.ReplacePlatformProcessorCall(Proc):
      self.assertTrue(trace_time.IsQPCUsable())

  def testGetWinNowFunction_QPC(self):
    if not (sys.platform.startswith(trace_time._PLATFORMS['windows'])
            or sys.platform.startswith(trace_time._PLATFORMS['cygwin'])):
      return True
    # Test requires QPC to be available on platform.
    if not trace_time.IsQPCUsable():
      return True
    self.assertGreater(trace_time.Now(), 0)

  # Works even if QPC would work.
  def testGetWinNowFunction_GetTickCount(self):
    if not (sys.platform.startswith(trace_time._PLATFORMS['windows'])
            or sys.platform.startswith(trace_time._PLATFORMS['cygwin'])):
      return True
    with self.ReplaceQPCCheck(lambda: False):
      self.assertGreater(trace_time.Now(), 0)

  # Linux tests.
  # Note: assertEquals is a deprecated alias; assertEqual is the
  # supported spelling.
  def testGetClockGetTimeClockNumber_linux(self):
    self.assertEqual(trace_time.GetClockGetTimeClockNumber('linux'), 1)

  def testGetClockGetTimeClockNumber_freebsd(self):
    self.assertEqual(trace_time.GetClockGetTimeClockNumber('freebsd'), 4)

  def testGetClockGetTimeClockNumber_bsd(self):
    self.assertEqual(trace_time.GetClockGetTimeClockNumber('bsd'), 3)

  def testGetClockGetTimeClockNumber_sunos(self):
    self.assertEqual(trace_time.GetClockGetTimeClockNumber('sunos5'), 4)

  # Smoke Test.
  def testMonotonic(self):
    # range (rather than the Python-2-only xrange) keeps this runnable on
    # both Python 2 and Python 3.
    time_one = trace_time.Now()
    for _ in range(1000):
      time_two = trace_time.Now()
      self.assertLessEqual(time_one, time_two)
      time_one = time_two
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
if __name__ == '__main__':
  # Surface debug-level logs while the test suite runs.
  logging.getLogger().setLevel(logging.DEBUG)
  unittest.main(verbosity=2)
 | 
			
		||||
							
								
								
									
										12
									
								
								tools/adb/systrace/catapult/common/py_trace_event/third_party/protobuf/README.chromium
									
									
									
									
										vendored
									
									
										Normal file
									
								
							
							
						
						
									
										12
									
								
								tools/adb/systrace/catapult/common/py_trace_event/third_party/protobuf/README.chromium
									
									
									
									
										vendored
									
									
										Normal file
									
								
							@ -0,0 +1,12 @@
 | 
			
		||||
Name: Protobuf
 | 
			
		||||
URL: https://developers.google.com/protocol-buffers/
 | 
			
		||||
Version: 3.0.0
 | 
			
		||||
License: BSD
 | 
			
		||||
 | 
			
		||||
Description:
 | 
			
		||||
Protocol buffers are Google's language-neutral, platform-neutral,
 | 
			
		||||
extensible mechanism for serializing structured data.
 | 
			
		||||
 | 
			
		||||
Local Modifications:
 | 
			
		||||
Removed pretty much everything except functions necessary to write
 | 
			
		||||
bools, ints, and strings.
 | 
			
		||||
							
								
								
									
										224
									
								
								tools/adb/systrace/catapult/common/py_trace_event/third_party/protobuf/encoder.py
									
									
									
									
										vendored
									
									
										Normal file
									
								
							
							
						
						
									
										224
									
								
								tools/adb/systrace/catapult/common/py_trace_event/third_party/protobuf/encoder.py
									
									
									
									
										vendored
									
									
										Normal file
									
								
							@ -0,0 +1,224 @@
 | 
			
		||||
# Protocol Buffers - Google's data interchange format
 | 
			
		||||
# Copyright 2008 Google Inc.  All rights reserved.
 | 
			
		||||
# https://developers.google.com/protocol-buffers/
 | 
			
		||||
#
 | 
			
		||||
# Redistribution and use in source and binary forms, with or without
 | 
			
		||||
# modification, are permitted provided that the following conditions are
 | 
			
		||||
# met:
 | 
			
		||||
#
 | 
			
		||||
#     * Redistributions of source code must retain the above copyright
 | 
			
		||||
# notice, this list of conditions and the following disclaimer.
 | 
			
		||||
#     * Redistributions in binary form must reproduce the above
 | 
			
		||||
# copyright notice, this list of conditions and the following disclaimer
 | 
			
		||||
# in the documentation and/or other materials provided with the
 | 
			
		||||
# distribution.
 | 
			
		||||
#     * Neither the name of Google Inc. nor the names of its
 | 
			
		||||
# contributors may be used to endorse or promote products derived from
 | 
			
		||||
# this software without specific prior written permission.
 | 
			
		||||
#
 | 
			
		||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 | 
			
		||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 | 
			
		||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 | 
			
		||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 | 
			
		||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 | 
			
		||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 | 
			
		||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 | 
			
		||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 | 
			
		||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 | 
			
		||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 | 
			
		||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 | 
			
		||||
 | 
			
		||||
import struct

import six

import wire_format
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _VarintSize(value):
 | 
			
		||||
  """Compute the size of a varint value."""
 | 
			
		||||
  if value <= 0x7f: return 1
 | 
			
		||||
  if value <= 0x3fff: return 2
 | 
			
		||||
  if value <= 0x1fffff: return 3
 | 
			
		||||
  if value <= 0xfffffff: return 4
 | 
			
		||||
  if value <= 0x7ffffffff: return 5
 | 
			
		||||
  if value <= 0x3ffffffffff: return 6
 | 
			
		||||
  if value <= 0x1ffffffffffff: return 7
 | 
			
		||||
  if value <= 0xffffffffffffff: return 8
 | 
			
		||||
  if value <= 0x7fffffffffffffff: return 9
 | 
			
		||||
  return 10
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _SignedVarintSize(value):
 | 
			
		||||
  """Compute the size of a signed varint value."""
 | 
			
		||||
  if value < 0: return 10
 | 
			
		||||
  if value <= 0x7f: return 1
 | 
			
		||||
  if value <= 0x3fff: return 2
 | 
			
		||||
  if value <= 0x1fffff: return 3
 | 
			
		||||
  if value <= 0xfffffff: return 4
 | 
			
		||||
  if value <= 0x7ffffffff: return 5
 | 
			
		||||
  if value <= 0x3ffffffffff: return 6
 | 
			
		||||
  if value <= 0x1ffffffffffff: return 7
 | 
			
		||||
  if value <= 0xffffffffffffff: return 8
 | 
			
		||||
  if value <= 0x7fffffffffffffff: return 9
 | 
			
		||||
  return 10
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _VarintEncoder():
 | 
			
		||||
  """Return an encoder for a basic varint value (does not include tag)."""
 | 
			
		||||
 | 
			
		||||
  def EncodeVarint(write, value):
 | 
			
		||||
    bits = value & 0x7f
 | 
			
		||||
    value >>= 7
 | 
			
		||||
    while value:
 | 
			
		||||
      write(six.int2byte(0x80|bits))
 | 
			
		||||
      bits = value & 0x7f
 | 
			
		||||
      value >>= 7
 | 
			
		||||
    return write(six.int2byte(bits))
 | 
			
		||||
 | 
			
		||||
  return EncodeVarint
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _SignedVarintEncoder():
 | 
			
		||||
  """Return an encoder for a basic signed varint value (does not include
 | 
			
		||||
  tag)."""
 | 
			
		||||
 | 
			
		||||
  def EncodeSignedVarint(write, value):
 | 
			
		||||
    if value < 0:
 | 
			
		||||
      value += (1 << 64)
 | 
			
		||||
    bits = value & 0x7f
 | 
			
		||||
    value >>= 7
 | 
			
		||||
    while value:
 | 
			
		||||
      write(six.int2byte(0x80|bits))
 | 
			
		||||
      bits = value & 0x7f
 | 
			
		||||
      value >>= 7
 | 
			
		||||
    return write(six.int2byte(bits))
 | 
			
		||||
 | 
			
		||||
  return EncodeSignedVarint
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Shared encoder closures, constructed once at import time.
_EncodeVarint = _VarintEncoder()
_EncodeSignedVarint = _SignedVarintEncoder()
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _VarintBytes(value):
  """Encode the given integer as a varint and return the bytes.  This is only
  called at startup time so it doesn't need to be fast."""
  chunks = []
  _EncodeVarint(chunks.append, value)
  return b"".join(chunks)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def TagBytes(field_number, wire_type):
  """Encode the given tag and return the bytes.  Only called at startup."""
  packed_tag = wire_format.PackTag(field_number, wire_type)
  return _VarintBytes(packed_tag)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _SimpleEncoder(wire_type, encode_value, compute_value_size):
  """Return a constructor for an encoder for fields of a particular type.

  Args:
      wire_type:  The field's wire type, for encoding tags.
      encode_value:  A function which encodes an individual value, e.g.
        _EncodeVarint().
      compute_value_size:  A function which computes the size of an individual
        value, e.g. _VarintSize().
  """

  def SpecificEncoder(field_number, is_repeated, is_packed):
    if is_packed:
      # Packed: one length-delimited tag, the total payload size, then the
      # raw values back to back.
      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
      encode_varint = _EncodeVarint

      def EncodePackedField(write, value):
        write(tag_bytes)
        payload_size = sum(compute_value_size(element) for element in value)
        encode_varint(write, payload_size)
        for element in value:
          encode_value(write, element)

      return EncodePackedField

    if is_repeated:
      # Unpacked repeated field: each value is preceded by its own tag.
      tag_bytes = TagBytes(field_number, wire_type)

      def EncodeRepeatedField(write, value):
        for element in value:
          write(tag_bytes)
          encode_value(write, element)

      return EncodeRepeatedField

    # Scalar field: a single tag followed by a single value.
    tag_bytes = TagBytes(field_number, wire_type)

    def EncodeField(write, value):
      write(tag_bytes)
      return encode_value(write, value)

    return EncodeField

  return SpecificEncoder
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Varint-encoded numeric fields.  Signed 32/64-bit ints and enums share the
# signed-varint encoding; unsigned ints use the plain varint encoding.
Int32Encoder = Int64Encoder = EnumEncoder = _SimpleEncoder(
    wire_format.WIRETYPE_VARINT, _EncodeSignedVarint, _SignedVarintSize)

UInt32Encoder = UInt64Encoder = _SimpleEncoder(
    wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def BoolEncoder(field_number, is_repeated, is_packed):
  """Returns an encoder for a boolean field."""

  false_byte = b'\x00'
  true_byte = b'\x01'

  if is_packed:
    # Packed: one tag, then the payload length (one byte per bool), then
    # the boolean bytes themselves.
    tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
    encode_varint = _EncodeVarint

    def EncodePackedField(write, value):
      write(tag_bytes)
      encode_varint(write, len(value))
      for element in value:
        write(true_byte if element else false_byte)

    return EncodePackedField

  if is_repeated:
    # Unpacked repeated field: a tag before every boolean byte.
    tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_VARINT)

    def EncodeRepeatedField(write, value):
      for element in value:
        write(tag_bytes)
        write(true_byte if element else false_byte)

    return EncodeRepeatedField

  # Scalar field: a single tag followed by a single boolean byte.
  tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_VARINT)

  def EncodeField(write, value):
    write(tag_bytes)
    if value:
      return write(true_byte)
    return write(false_byte)

  return EncodeField
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def StringEncoder(field_number, is_repeated, is_packed):
  """Returns an encoder for a string field."""

  tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
  encode_varint = _EncodeVarint
  get_len = len
  assert not is_packed  # Strings are never packed.
  if is_repeated:

    def EncodeRepeatedField(write, value):
      # Each element is length-delimited: tag, UTF-8 byte count, bytes.
      for element in value:
        data = element.encode('utf-8')
        write(tag)
        encode_varint(write, get_len(data))
        write(data)

    return EncodeRepeatedField

  def EncodeField(write, value):
    data = value.encode('utf-8')
    write(tag)
    encode_varint(write, get_len(data))
    return write(data)

  return EncodeField
 | 
			
		||||
 | 
			
		||||
							
								
								
									
										52
									
								
								tools/adb/systrace/catapult/common/py_trace_event/third_party/protobuf/wire_format.py
									
									
									
									
										vendored
									
									
										Normal file
									
								
							
							
						
						
									
										52
									
								
								tools/adb/systrace/catapult/common/py_trace_event/third_party/protobuf/wire_format.py
									
									
									
									
										vendored
									
									
										Normal file
									
								
							@ -0,0 +1,52 @@
 | 
			
		||||
# Protocol Buffers - Google's data interchange format
 | 
			
		||||
# Copyright 2008 Google Inc.  All rights reserved.
 | 
			
		||||
# https://developers.google.com/protocol-buffers/
 | 
			
		||||
#
 | 
			
		||||
# Redistribution and use in source and binary forms, with or without
 | 
			
		||||
# modification, are permitted provided that the following conditions are
 | 
			
		||||
# met:
 | 
			
		||||
#
 | 
			
		||||
#     * Redistributions of source code must retain the above copyright
 | 
			
		||||
# notice, this list of conditions and the following disclaimer.
 | 
			
		||||
#     * Redistributions in binary form must reproduce the above
 | 
			
		||||
# copyright notice, this list of conditions and the following disclaimer
 | 
			
		||||
# in the documentation and/or other materials provided with the
 | 
			
		||||
# distribution.
 | 
			
		||||
#     * Neither the name of Google Inc. nor the names of its
 | 
			
		||||
# contributors may be used to endorse or promote products derived from
 | 
			
		||||
# this software without specific prior written permission.
 | 
			
		||||
#
 | 
			
		||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 | 
			
		||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 | 
			
		||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 | 
			
		||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 | 
			
		||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 | 
			
		||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 | 
			
		||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 | 
			
		||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 | 
			
		||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 | 
			
		||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 | 
			
		||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 | 
			
		||||
 | 
			
		||||
TAG_TYPE_BITS = 3  # Number of bits used to hold type info in a proto tag.

# Wire type codes, as defined by the protocol buffer wire format.
WIRETYPE_VARINT = 0
WIRETYPE_FIXED64 = 1
WIRETYPE_LENGTH_DELIMITED = 2
WIRETYPE_START_GROUP = 3
WIRETYPE_END_GROUP = 4
WIRETYPE_FIXED32 = 5
_WIRETYPE_MAX = 5  # Highest valid wire type value.
 | 
			
		||||
 | 
			
		||||
def PackTag(field_number, wire_type):
  """Returns an unsigned 32-bit integer that encodes the field number and
  wire type information in standard protocol message wire format.

  Args:
    field_number: Expected to be an integer in the range [1, 1 << 29)
    wire_type: One of the WIRETYPE_* constants.
  """
  if wire_type < 0 or wire_type > _WIRETYPE_MAX:
    raise RuntimeError('Unknown wire type: %d' % wire_type)
  # The low TAG_TYPE_BITS bits carry the wire type; the rest carry the
  # field number.
  return (field_number << TAG_TYPE_BITS) | wire_type
 | 
			
		||||
 | 
			
		||||
							
								
								
									
										31
									
								
								tools/adb/systrace/catapult/common/py_utils/PRESUBMIT.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										31
									
								
								tools/adb/systrace/catapult/common/py_utils/PRESUBMIT.py
									
									
									
									
									
										Normal file
									
								
							@ -0,0 +1,31 @@
 | 
			
		||||
# Copyright 2015 The Chromium Authors. All rights reserved.
 | 
			
		||||
# Use of this source code is governed by a BSD-style license that can be
 | 
			
		||||
# found in the LICENSE file.
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def CheckChangeOnUpload(input_api, output_api):
  """Presubmit hook run when a change is uploaded for review."""
  return _CommonChecks(input_api, output_api)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def CheckChangeOnCommit(input_api, output_api):
  """Presubmit hook run on commit; delegates to the shared checks."""
  return _CommonChecks(input_api, output_api)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _CommonChecks(input_api, output_api):
  """Checks shared by the upload and commit hooks: runs pylint.

  Pylint is given extra sys.path roots (this project and its dependencies)
  and the catapult-wide pylintrc two directories up.
  """
  results = []
  results += input_api.RunTests(input_api.canned_checks.GetPylint(
      input_api, output_api, extra_paths_list=_GetPathsToPrepend(input_api),
      pylintrc='../../pylintrc'))
  return results
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _GetPathsToPrepend(input_api):
  """Return the sys.path entries pylint needs: this project plus the
  catapult dependencies it imports from."""
  join = input_api.os_path.join
  project_dir = input_api.PresubmitLocalPath()
  catapult_dir = join(project_dir, '..', '..')
  dependency_parts = [
      ('dependency_manager',),
      ('devil',),
      ('third_party', 'mock'),
      ('third_party', 'pyfakefs'),
  ]
  return [project_dir] + [
      join(catapult_dir, *parts) for parts in dependency_parts]
 | 
			
		||||
							
								
								
									
										38
									
								
								tools/adb/systrace/catapult/common/py_utils/bin/run_tests
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										38
									
								
								tools/adb/systrace/catapult/common/py_utils/bin/run_tests
									
									
									
									
									
										Normal file
									
								
							@ -0,0 +1,38 @@
 | 
			
		||||
#!/usr/bin/env python
 | 
			
		||||
# Copyright (c) 2015 The Chromium Authors. All rights reserved.
 | 
			
		||||
# Use of this source code is governed by a BSD-style license that can be
 | 
			
		||||
# found in the LICENSE file.
 | 
			
		||||
 | 
			
		||||
import os
 | 
			
		||||
import sys
 | 
			
		||||
 | 
			
		||||
# Root of the catapult checkout: three directories above this script.
_CATAPULT_PATH = os.path.abspath(
    os.path.join(os.path.dirname(__file__), '..', '..', '..'))

# The py_utils project directory whose tests this script runs.
_PY_UTILS_PATH = os.path.abspath(
    os.path.join(_CATAPULT_PATH, 'common', 'py_utils'))
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _RunTestsOrDie(top_level_dir):
  """Run the typ test suite rooted at |top_level_dir|; exit on failure.

  Relies on run_with_typ having been imported into the module namespace by
  the __main__ block below before this is called.
  """
  exit_code = run_with_typ.Run(top_level_dir, path=[_PY_UTILS_PATH])
  if exit_code:
    sys.exit(exit_code)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _AddToPathIfNeeded(path):
  """Prepend |path| to sys.path unless it is already present."""
  if path in sys.path:
    return
  sys.path.insert(0, path)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
if __name__ == '__main__':
  # Make the catapult checkout importable so hooks/ and catapult_build/
  # below resolve.
  _AddToPathIfNeeded(_CATAPULT_PATH)

  from hooks import install
  if '--no-install-hooks' in sys.argv:
    sys.argv.remove('--no-install-hooks')
  else:
    install.InstallHooks()

  # Imported late on purpose: only importable after the path setup above.
  from catapult_build import run_with_typ
  _RunTestsOrDie(_PY_UTILS_PATH)
  sys.exit(0)
 | 
			
		||||
							
								
								
									
										158
									
								
								tools/adb/systrace/catapult/common/py_utils/py_utils/__init__.py
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										158
									
								
								tools/adb/systrace/catapult/common/py_utils/py_utils/__init__.py
									
									
									
									
									
										Normal file
									
								
							@ -0,0 +1,158 @@
 | 
			
		||||
#!/usr/bin/env python
 | 
			
		||||
 | 
			
		||||
# Copyright (c) 2016 The Chromium Authors. All rights reserved.
 | 
			
		||||
# Use of this source code is governed by a BSD-style license that can be
 | 
			
		||||
# found in the LICENSE file.
 | 
			
		||||
 | 
			
		||||
from __future__ import print_function
 | 
			
		||||
 | 
			
		||||
import functools
 | 
			
		||||
import inspect
 | 
			
		||||
import os
 | 
			
		||||
import sys
 | 
			
		||||
import time
 | 
			
		||||
import platform
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def GetCatapultDir():
  """Return the normalized path of the catapult checkout root, which is
  three directory levels above this module."""
  here = os.path.dirname(__file__)
  return os.path.normpath(os.path.join(here, '..', '..', '..'))
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def IsRunningOnCrosDevice():
  """Returns True if we're on a ChromeOS device."""
  lsb_release = '/etc/lsb-release'
  # ChromeOS identifies as linux but ships a CHROMEOS marker in lsb-release.
  if not sys.platform.startswith('linux') or not os.path.exists(lsb_release):
    return False
  with open(lsb_release, 'r') as f:
    return 'CHROMEOS_RELEASE_NAME' in f.read()


def GetHostOsName():
  """Map sys.platform to 'chromeos'/'linux'/'mac'/'win' (None if unknown)."""
  # The ChromeOS check must come first: it also reports as linux.
  if IsRunningOnCrosDevice():
    return 'chromeos'
  plat = sys.platform
  if plat.startswith('linux'):
    return 'linux'
  if plat == 'darwin':
    return 'mac'
  if plat == 'win32':
    return 'win'
  return None
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def GetHostArchName():
  """Return the host CPU architecture string, e.g. 'x86_64' or 'AMD64'."""
  machine = platform.machine()
  return machine
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _ExecutableExtensions():
  """Return the Windows executable extensions, upper-cased without the dot.

  Reads PATHEXT (e.g. '.com;.exe;.bat;.cmd') and returns e.g.
  ['COM', 'EXE', 'BAT', 'CMD'].

  Fix: os.getenv('PATHEXT') returns None when the variable is unset
  (always the case off-Windows, and possible in stripped environments),
  which crashed on .split(). Fall back to the standard Windows defaults.
  """
  pathext = os.getenv('PATHEXT', '.COM;.EXE;.BAT;.CMD')
  return [ext[1:].upper() for ext in pathext.split(';')]
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def IsExecutable(path):
  """Return True if |path| is a file the current user may execute."""
  if not os.path.isfile(path):
    return False
  if getattr(os, 'name', None) == 'nt':
    # Windows: executability is determined by the extension (PATHEXT).
    return path.split('.')[-1].upper() in _ExecutableExtensions()
  return os.access(path, os.X_OK)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _AddDirToPythonPath(*path_parts):
  """Insert the joined, absolute path at sys.path[1] if it is a new dir.

  Position 1 (not 0) because some telemetry call sites assume sys.path[0]
  is the directory containing the running script.
  """
  target = os.path.abspath(os.path.join(*path_parts))
  if not os.path.isdir(target) or target in sys.path:
    return
  sys.path.insert(1, target)
 | 
			
		||||
 | 
			
		||||
# Make py_utils' own dependencies importable before the devil imports below.
_AddDirToPythonPath(os.path.join(GetCatapultDir(), 'devil'))
_AddDirToPythonPath(os.path.join(GetCatapultDir(), 'dependency_manager'))
_AddDirToPythonPath(os.path.join(GetCatapultDir(), 'third_party', 'mock'))
# mox3 is needed for pyfakefs usage, but not for pylint.
_AddDirToPythonPath(os.path.join(GetCatapultDir(), 'third_party', 'mox3'))
_AddDirToPythonPath(
    os.path.join(GetCatapultDir(), 'third_party', 'pyfakefs'))

from devil.utils import timeout_retry  # pylint: disable=wrong-import-position
from devil.utils import reraiser_thread  # pylint: disable=wrong-import-position
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def Timeout(default_timeout):
  """Decorator factory: abort the decorated function after a timeout.

  The decorated function accepts an optional timeout= keyword argument
  (seconds) that overrides |default_timeout|; see TimeoutDeco.
  """
  def _Decorate(func):
    return TimeoutDeco(func, default_timeout)
  return _Decorate
 | 
			
		||||
 | 
			
		||||
# Note: Even though the "timeout" keyword argument is the only
# keyword argument that will need to be given to the decorated function,
# we still have to use the **kwargs syntax, because we have to use
# the *args syntax here before (since the decorator decorates functions
# with different numbers of positional arguments) and Python doesn't allow
# a single named keyword argument after *args.
# (e.g., 'def foo(*args, bar=42):' is a syntax error)

def TimeoutDeco(func, default_timeout):
  """Wrap |func| so it runs under devil's timeout_retry with zero retries.

  The wrapper accepts an optional timeout= keyword (seconds); other
  keyword arguments are not forwarded to |func|. On timeout it prints a
  message and returns False instead of raising.
  """
  @functools.wraps(func)
  def RunWithTimeout(*args, **kwargs):
    if 'timeout' in kwargs:
      timeout = kwargs['timeout']
    else:
      timeout = default_timeout
    try:
      # retries=0: a timeout is reported immediately, never retried.
      return timeout_retry.Run(func, timeout, 0, args=args)
    except reraiser_thread.TimeoutError:
      print('%s timed out.' % func.__name__)
      return False
  return RunWithTimeout
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
MIN_POLL_INTERVAL_IN_SECONDS = 0.1
MAX_POLL_INTERVAL_IN_SECONDS = 5
OUTPUT_INTERVAL_IN_SECONDS = 300


def WaitFor(condition, timeout):
  """Waits for up to |timeout| secs for the function |condition| to return True.

  Polling frequency is (elapsed_time / 10), with a min of .1s and max of 5s.

  Returns:
    Result of |condition| function (if present).
  """
  def _DescribeCondition():
    # For lambdas, the source text is a far better diagnostic than '<lambda>'.
    if condition.__name__ == '<lambda>':
      try:
        return inspect.getsource(condition).strip()
      except IOError:
        pass
    return condition.__name__

  # Initial check before starting the clock.
  result = condition()
  if result:
    return result

  started = time.time()
  last_output = started
  elapsed = time.time() - started
  while elapsed < timeout:
    result = condition()
    if result:
      return result
    now = time.time()
    elapsed = now - started
    if now - last_output > OUTPUT_INTERVAL_IN_SECONDS:
      # NOTE(review): nothing is printed here; this only resets the
      # bookkeeping timestamp — presumably a progress message was removed.
      last_output = time.time()
    # Back off proportionally to elapsed time, clamped to [0.1s, 5s].
    interval = elapsed / 10.
    interval = max(interval, MIN_POLL_INTERVAL_IN_SECONDS)
    interval = min(interval, MAX_POLL_INTERVAL_IN_SECONDS)
    time.sleep(interval)
  raise TimeoutException('Timed out while waiting %ds for %s.' %
                         (timeout, _DescribeCondition()))
 | 
			
		||||
 | 
			
		||||
class TimeoutException(Exception):
  """The operation failed to complete because of a timeout.

  Waiting for a longer period of time might allow the same operation
  to succeed.
  """
 | 
			
		||||
@ -0,0 +1,21 @@
 | 
			
		||||
# Copyright 2016 The Chromium Authors. All rights reserved.
 | 
			
		||||
# Use of this source code is governed by a BSD-style license that can be
 | 
			
		||||
# found in the LICENSE file.
 | 
			
		||||
 | 
			
		||||
import atexit
 | 
			
		||||
import logging
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _WrapFunction(function):
  """Return a wrapper that logs around |function| and swallows exceptions.

  Used so one failing atexit callback cannot abort the rest of shutdown.
  """
  def _SafeCall(*args, **kwargs):
    logging.debug('Try running %s', repr(function))
    try:
      function(*args, **kwargs)
      logging.debug('Did run %s', repr(function))
    except Exception:  # pylint: disable=broad-except
      logging.exception('Exception running %s', repr(function))
  return _SafeCall
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def Register(function, *args, **kwargs):
  """Schedule |function(*args, **kwargs)| to run at interpreter exit.

  Unlike plain atexit.register, a failure in |function| is logged and
  swallowed (see _WrapFunction) rather than propagating during shutdown.
  """
  atexit.register(_WrapFunction(function), *args, **kwargs)
 | 
			
		||||
@ -0,0 +1,61 @@
 | 
			
		||||
# Copyright 2015 The Chromium Authors. All rights reserved.
 | 
			
		||||
# Use of this source code is governed by a BSD-style license that can be
 | 
			
		||||
# found in the LICENSE file.
 | 
			
		||||
 | 
			
		||||
import logging
 | 
			
		||||
 | 
			
		||||
import dependency_manager
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class BinaryManager(object):
  """ This class is effectively a subclass of dependency_manager, but uses a
      different number of arguments for FetchPath and LocalPath.
  """

  def __init__(self, config_files):
    # A list of config file paths is required; reject anything else (a bare
    # string would otherwise be iterated per-character below).
    if not config_files or not isinstance(config_files, list):
      raise ValueError(
          'Must supply a list of config files to the BinaryManager')
    configs = [dependency_manager.BaseConfig(config) for config in config_files]
    self._dependency_manager = dependency_manager.DependencyManager(configs)

  def FetchPathWithVersion(self, binary_name, os_name, arch, os_version=None):
    """ Return a path to the executable for <binary_name>, or None if not found.

    Will attempt to download from cloud storage if needed.
    """
    return self._WrapDependencyManagerFunction(
        self._dependency_manager.FetchPathWithVersion, binary_name, os_name,
        arch, os_version)

  def FetchPath(self, binary_name, os_name, arch, os_version=None):
    """ Return a path to the executable for <binary_name>, or None if not found.

    Will attempt to download from cloud storage if needed.
    """
    return self._WrapDependencyManagerFunction(
        self._dependency_manager.FetchPath, binary_name, os_name, arch,
        os_version)

  def LocalPath(self, binary_name, os_name, arch, os_version=None):
    """ Return a local path to the given binary name, or None if not found.

    Will not download from cloud_storage.
    """
    return self._WrapDependencyManagerFunction(
        self._dependency_manager.LocalPath, binary_name, os_name, arch,
        os_version)

  def _WrapDependencyManagerFunction(
      self, function, binary_name, os_name, arch, os_version):
    # Dependency configs key platforms as '<os>_<arch>', optionally versioned
    # as '<os>_<version>_<arch>'. Try the versioned key first, then fall back
    # to the unversioned one.
    platform = '%s_%s' % (os_name, arch)
    if os_version:
      try:
        versioned_platform = '%s_%s_%s' % (os_name, os_version, arch)
        return function(binary_name, versioned_platform)
      except dependency_manager.NoPathFoundError:
        logging.warning(
            'Cannot find path for %s on platform %s. Falling back to %s.',
            binary_name, versioned_platform, platform)
    return function(binary_name, platform)
 | 
			
		||||
 | 
			
		||||
@ -0,0 +1,214 @@
 | 
			
		||||
# Copyright 2016 The Chromium Authors. All rights reserved.
 | 
			
		||||
# Use of this source code is governed by a BSD-style license that can be
 | 
			
		||||
# found in the LICENSE file.
 | 
			
		||||
 | 
			
		||||
import json
 | 
			
		||||
import os
 | 
			
		||||
 | 
			
		||||
from pyfakefs import fake_filesystem_unittest
 | 
			
		||||
from dependency_manager import exceptions
 | 
			
		||||
 | 
			
		||||
from py_utils import binary_manager
 | 
			
		||||
 | 
			
		||||
class BinaryManagerTest(fake_filesystem_unittest.TestCase):
  """Tests BinaryManager against a pyfakefs filesystem seeded with a config.

  NOTE(review): written for Python 2 unittest — assertItemsEqual below is
  Python 2-only (assertCountEqual in Python 3).
  """
  # TODO(aiolos): disable cloud storage use during this test.

  def setUp(self):
    # All file access below goes through the fake filesystem.
    self.setUpPyfakefs()
    # pylint: disable=bad-continuation
    self.expected_dependencies = {
        'dep_1': {
          'cloud_storage_base_folder': 'dependencies/fake_config',
          'cloud_storage_bucket': 'chrome-tel',
          'file_info': {
            'linux_x86_64': {
              'cloud_storage_hash': '661ce936b3276f7ec3d687ab62be05b96d796f21',
              'download_path': 'bin/linux/x86_64/dep_1'
            },
            'mac_x86_64': {
              'cloud_storage_hash': 'c7b1bfc6399dc683058e88dac1ef0f877edea74b',
              'download_path': 'bin/mac/x86_64/dep_1'
            },
            'win_AMD64': {
              'cloud_storage_hash': 'ac4fee89a51662b9d920bce443c19b9b2929b198',
              'download_path': 'bin/win/AMD64/dep_1.exe'
            },
            'win_x86': {
              'cloud_storage_hash': 'e246e183553ea26967d7b323ea269e3357b9c837',
              'download_path': 'bin/win/x86/dep_1.exe'
            }
          }
        },
        'dep_2': {
          'cloud_storage_base_folder': 'dependencies/fake_config',
          'cloud_storage_bucket': 'chrome-tel',
          'file_info': {
            'linux_x86_64': {
              'cloud_storage_hash': '13a57efae9a680ac0f160b3567e02e81f4ac493c',
              'download_path': 'bin/linux/x86_64/dep_2',
              'local_paths': [
                  '../../example/location/linux/dep_2',
                  '../../example/location2/linux/dep_2'
              ]
            },
            'mac_x86_64': {
              'cloud_storage_hash': 'd10c0ddaa8586b20449e951216bee852fa0f8850',
              'download_path': 'bin/mac/x86_64/dep_2',
              'local_paths': [
                  '../../example/location/mac/dep_2',
                  '../../example/location2/mac/dep_2'
              ]
            },
            'win_AMD64': {
              'cloud_storage_hash': 'fd5b417f78c7f7d9192a98967058709ded1d399d',
              'download_path': 'bin/win/AMD64/dep_2.exe',
              'local_paths': [
                  '../../example/location/win64/dep_2',
                  '../../example/location2/win64/dep_2'
              ]
            },
            'win_x86': {
              'cloud_storage_hash': 'cf5c8fe920378ce30d057e76591d57f63fd31c1a',
              'download_path': 'bin/win/x86/dep_2.exe',
              'local_paths': [
                  '../../example/location/win32/dep_2',
                  '../../example/location2/win32/dep_2'
              ]
            },
            'android_k_x64': {
              'cloud_storage_hash': '09177be2fed00b44df0e777932828425440b23b3',
              'download_path': 'bin/android/x64/k/dep_2.apk',
              'local_paths': [
                  '../../example/location/android_x64/k/dep_2',
                  '../../example/location2/android_x64/k/dep_2'
              ]
            },
            'android_l_x64': {
              'cloud_storage_hash': '09177be2fed00b44df0e777932828425440b23b3',
              'download_path': 'bin/android/x64/l/dep_2.apk',
              'local_paths': [
                  '../../example/location/android_x64/l/dep_2',
                  '../../example/location2/android_x64/l/dep_2'
              ]
            },
            'android_k_x86': {
              'cloud_storage_hash': 'bcf02af039713a48b69b89bd7f0f9c81ed8183a4',
              'download_path': 'bin/android/x86/k/dep_2.apk',
              'local_paths': [
                  '../../example/location/android_x86/k/dep_2',
                  '../../example/location2/android_x86/k/dep_2'
              ]
            },
            'android_l_x86': {
              'cloud_storage_hash': '12a74cec071017ba11655b5740b8a58e2f52a219',
              'download_path': 'bin/android/x86/l/dep_2.apk',
              'local_paths': [
                  '../../example/location/android_x86/l/dep_2',
                  '../../example/location2/android_x86/l/dep_2'
              ]
            }
          }
        },
        'dep_3': {
          'file_info': {
            'linux_x86_64': {
              'local_paths': [
                  '../../example/location/linux/dep_3',
                  '../../example/location2/linux/dep_3'
              ]
            },
            'mac_x86_64': {
              'local_paths': [
                  '../../example/location/mac/dep_3',
                  '../../example/location2/mac/dep_3'
              ]
            },
            'win_AMD64': {
              'local_paths': [
                  '../../example/location/win64/dep_3',
                  '../../example/location2/win64/dep_3'
              ]
            },
            'win_x86': {
              'local_paths': [
                  '../../example/location/win32/dep_3',
                  '../../example/location2/win32/dep_3'
              ]
            }
          }
        }
    }
    # pylint: enable=bad-continuation
    fake_config = {
        'config_type': 'BaseConfig',
        'dependencies': self.expected_dependencies
    }

    self.base_config = os.path.join(os.path.dirname(__file__),
                                    'example_config.json')
    self.fs.CreateFile(self.base_config, contents=json.dumps(fake_config))
    # Pre-create the local files dep_2's 'local_paths' resolve to, so that
    # FetchPath/LocalPath can find them without cloud storage.
    linux_file = os.path.join(
        os.path.dirname(self.base_config),
        os.path.join('..', '..', 'example', 'location2', 'linux', 'dep_2'))
    android_file = os.path.join(
        os.path.dirname(self.base_config),
        '..', '..', 'example', 'location', 'android_x86', 'l', 'dep_2')
    self.expected_dep2_linux_file = os.path.abspath(linux_file)
    self.expected_dep2_android_file = os.path.abspath(android_file)
    self.fs.CreateFile(self.expected_dep2_linux_file)
    self.fs.CreateFile(self.expected_dep2_android_file)

  def tearDown(self):
    self.tearDownPyfakefs()

  def testInitializationNoConfig(self):
    with self.assertRaises(ValueError):
      binary_manager.BinaryManager(None)

  def testInitializationMissingConfig(self):
    with self.assertRaises(ValueError):
      binary_manager.BinaryManager(os.path.join('missing', 'path'))

  def testInitializationWithConfig(self):
    # A bare string (not a list) must be rejected.
    with self.assertRaises(ValueError):
      manager = binary_manager.BinaryManager(self.base_config)
    manager = binary_manager.BinaryManager([self.base_config])
    self.assertItemsEqual(self.expected_dependencies,
                          manager._dependency_manager._lookup_dict)

  def testSuccessfulFetchPathNoOsVersion(self):
    manager = binary_manager.BinaryManager([self.base_config])
    found_path = manager.FetchPath('dep_2', 'linux', 'x86_64')
    self.assertEqual(self.expected_dep2_linux_file, found_path)

  def testSuccessfulFetchPathOsVersion(self):
    manager = binary_manager.BinaryManager([self.base_config])
    found_path = manager.FetchPath('dep_2', 'android', 'x86', 'l')
    self.assertEqual(self.expected_dep2_android_file, found_path)

  def testSuccessfulFetchPathFallbackToNoOsVersion(self):
    # Unknown os_version must fall back to the unversioned platform key.
    manager = binary_manager.BinaryManager([self.base_config])
    found_path = manager.FetchPath('dep_2', 'linux', 'x86_64', 'fake_version')
    self.assertEqual(self.expected_dep2_linux_file, found_path)

  def testFailedFetchPathMissingDep(self):
    manager = binary_manager.BinaryManager([self.base_config])
    with self.assertRaises(exceptions.NoPathFoundError):
      manager.FetchPath('missing_dep', 'linux', 'x86_64')
    with self.assertRaises(exceptions.NoPathFoundError):
      manager.FetchPath('missing_dep', 'android', 'x86', 'l')
    with self.assertRaises(exceptions.NoPathFoundError):
      manager.FetchPath('dep_1', 'linux', 'bad_arch')
    with self.assertRaises(exceptions.NoPathFoundError):
      manager.FetchPath('dep_1', 'bad_os', 'x86')

  def testSuccessfulLocalPathNoOsVersion(self):
    manager = binary_manager.BinaryManager([self.base_config])
    found_path = manager.LocalPath('dep_2', 'linux', 'x86_64')
    self.assertEqual(self.expected_dep2_linux_file, found_path)

  def testSuccessfulLocalPathOsVersion(self):
    manager = binary_manager.BinaryManager([self.base_config])
    found_path = manager.LocalPath('dep_2', 'android', 'x86', 'l')
    self.assertEqual(self.expected_dep2_android_file, found_path)
 | 
			
		||||
 | 
			
		||||
@ -0,0 +1,34 @@
 | 
			
		||||
# Copyright 2013 The Chromium Authors. All rights reserved.
 | 
			
		||||
# Use of this source code is governed by a BSD-style license that can be
 | 
			
		||||
# found in the LICENSE file.
 | 
			
		||||
 | 
			
		||||
from __future__ import absolute_import
 | 
			
		||||
from __future__ import division
 | 
			
		||||
from __future__ import print_function
 | 
			
		||||
import re
 | 
			
		||||
import six
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def ToUnderscore(obj):
  """Converts a string, list, or dict from camelCase to lower_with_underscores.

  Descends recursively into lists and dicts, converting all dict keys.
  Returns a newly allocated object of the same structure as the input.
  """
  if isinstance(obj, six.string_types):
    # Insert '_' before each run of capitals (except at the start), then
    # lower-case everything.
    return re.sub('(?!^)([A-Z]+)', r'_\1', obj).lower()

  if isinstance(obj, list):
    return [ToUnderscore(elem) for elem in obj]

  if isinstance(obj, dict):
    converted = {}
    for key, value in six.iteritems(obj):
      if isinstance(value, (list, dict)):
        converted[ToUnderscore(key)] = ToUnderscore(value)
      else:
        converted[ToUnderscore(key)] = value
    return converted

  return obj
 | 
			
		||||
@ -0,0 +1,50 @@
 | 
			
		||||
# Copyright 2013 The Chromium Authors. All rights reserved.
 | 
			
		||||
# Use of this source code is governed by a BSD-style license that can be
 | 
			
		||||
# found in the LICENSE file.
 | 
			
		||||
 | 
			
		||||
import unittest
 | 
			
		||||
 | 
			
		||||
from py_utils import camel_case
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class CamelCaseTest(unittest.TestCase):
  """Unit tests for camel_case.ToUnderscore over each supported input type."""

  def testString(self):
    self.assertEqual(camel_case.ToUnderscore('camelCase'), 'camel_case')
    self.assertEqual(camel_case.ToUnderscore('CamelCase'), 'camel_case')
    self.assertEqual(camel_case.ToUnderscore('Camel2Case'), 'camel2_case')
    self.assertEqual(camel_case.ToUnderscore('Camel2Case2'), 'camel2_case2')
    self.assertEqual(camel_case.ToUnderscore('2012Q3'), '2012_q3')

  def testList(self):
    # List elements are converted recursively, including nested lists.
    camel_case_list = ['CamelCase', ['NestedList']]
    underscore_list = ['camel_case', ['nested_list']]
    self.assertEqual(camel_case.ToUnderscore(camel_case_list), underscore_list)

  def testDict(self):
    # Dict keys are converted; scalar values are left untouched.
    camel_case_dict = {
        'gpu': {
            'vendorId': 1000,
            'deviceId': 2000,
            'vendorString': 'aString',
            'deviceString': 'bString'},
        'secondaryGpus': [
            {'vendorId': 3000, 'deviceId': 4000,
             'vendorString': 'k', 'deviceString': 'l'}
        ]
    }
    underscore_dict = {
        'gpu': {
            'vendor_id': 1000,
            'device_id': 2000,
            'vendor_string': 'aString',
            'device_string': 'bString'},
        'secondary_gpus': [
            {'vendor_id': 3000, 'device_id': 4000,
             'vendor_string': 'k', 'device_string': 'l'}
        ]
    }
    self.assertEqual(camel_case.ToUnderscore(camel_case_dict), underscore_dict)

  def testOther(self):
    # Non-string/list/dict inputs are returned unchanged.
    self.assertEqual(camel_case.ToUnderscore(self), self)
 | 
			
		||||
@ -0,0 +1,126 @@
 | 
			
		||||
{
 | 
			
		||||
  "config_type": "BaseConfig",
 | 
			
		||||
  "dependencies": {
 | 
			
		||||
    "chrome_canary": {
 | 
			
		||||
      "cloud_storage_base_folder": "binary_dependencies",
 | 
			
		||||
      "cloud_storage_bucket": "chrome-telemetry",
 | 
			
		||||
      "file_info": {
 | 
			
		||||
        "mac_x86_64": {
 | 
			
		||||
          "cloud_storage_hash": "381a491e14ab523b8db4cdf3c993713678237af8",
 | 
			
		||||
          "download_path": "bin/reference_builds/chrome-mac64.zip",
 | 
			
		||||
          "path_within_archive": "chrome-mac/Google Chrome.app/Contents/MacOS/Google Chrome",
 | 
			
		||||
          "version_in_cs": "77.0.3822.0"
 | 
			
		||||
        },
 | 
			
		||||
        "win_AMD64": {
 | 
			
		||||
          "cloud_storage_hash": "600ee522c410efe1de2f593c0efc32ae113a7d99",
 | 
			
		||||
          "download_path": "bin\\reference_build\\chrome-win64-clang.zip",
 | 
			
		||||
          "path_within_archive": "chrome-win64-clang\\chrome.exe",
 | 
			
		||||
          "version_in_cs": "77.0.3822.0"
 | 
			
		||||
        },
 | 
			
		||||
        "win_x86": {
 | 
			
		||||
          "cloud_storage_hash": "5b79a181bfbd94d8288529b0da1defa3ef097197",
 | 
			
		||||
          "download_path": "bin\\reference_build\\chrome-win32-clang.zip",
 | 
			
		||||
          "path_within_archive": "chrome-win32-clang\\chrome.exe",
 | 
			
		||||
          "version_in_cs": "77.0.3822.0"
 | 
			
		||||
        }
 | 
			
		||||
      }
 | 
			
		||||
    },
 | 
			
		||||
    "chrome_dev": {
 | 
			
		||||
      "cloud_storage_base_folder": "binary_dependencies",
 | 
			
		||||
      "cloud_storage_bucket": "chrome-telemetry",
 | 
			
		||||
      "file_info": {
 | 
			
		||||
        "linux_x86_64": {
 | 
			
		||||
          "cloud_storage_hash": "61d68a6b00f25c964f5162f5251962468c886f3a",
 | 
			
		||||
          "download_path": "bin/reference_build/chrome-linux64.zip",
 | 
			
		||||
          "path_within_archive": "chrome-linux64/chrome",
 | 
			
		||||
          "version_in_cs": "76.0.3809.21"
 | 
			
		||||
        }
 | 
			
		||||
      }
 | 
			
		||||
    },
 | 
			
		||||
    "chrome_stable": {
 | 
			
		||||
      "cloud_storage_base_folder": "binary_dependencies",
 | 
			
		||||
      "cloud_storage_bucket": "chrome-telemetry",
 | 
			
		||||
      "file_info": {
 | 
			
		||||
        "android_k_armeabi-v7a": {
 | 
			
		||||
          "cloud_storage_hash": "28b913c720d56a30c092625c7862f00175a316c7",
 | 
			
		||||
          "download_path": "bin/reference_build/android_k_armeabi-v7a/ChromeStable.apk",
 | 
			
		||||
          "version_in_cs": "75.0.3770.67"
 | 
			
		||||
        },
 | 
			
		||||
        "android_l_arm64-v8a": {
 | 
			
		||||
          "cloud_storage_hash": "4b953c33c61f94c2198e8001d0d8142c6504a875",
 | 
			
		||||
          "download_path": "bin/reference_build/android_l_arm64-v8a/ChromeStable.apk",
 | 
			
		||||
          "version_in_cs": "75.0.3770.67"
 | 
			
		||||
        },
 | 
			
		||||
        "android_l_armeabi-v7a": {
 | 
			
		||||
          "cloud_storage_hash": "28b913c720d56a30c092625c7862f00175a316c7",
 | 
			
		||||
          "download_path": "bin/reference_build/android_l_armeabi-v7a/ChromeStable.apk",
 | 
			
		||||
          "version_in_cs": "75.0.3770.67"
 | 
			
		||||
        },
 | 
			
		||||
        "android_n_arm64-v8a": {
 | 
			
		||||
          "cloud_storage_hash": "84152ba8f7a25cacc79d588ed827ea75f0e4ab94",
 | 
			
		||||
          "download_path": "bin/reference_build/android_n_arm64-v8a/Monochrome.apk",
 | 
			
		||||
          "version_in_cs": "75.0.3770.67"
 | 
			
		||||
        },
 | 
			
		||||
        "android_n_armeabi-v7a": {
 | 
			
		||||
          "cloud_storage_hash": "656bb9e3982d0d35decd5347ced2c320a7267f33",
 | 
			
		||||
          "download_path": "bin/reference_build/android_n_armeabi-v7a/Monochrome.apk",
 | 
			
		||||
          "version_in_cs": "75.0.3770.67"
 | 
			
		||||
        },
 | 
			
		||||
        "linux_x86_64": {
 | 
			
		||||
          "cloud_storage_hash": "dee8469e8dcd8453efd33f3a00d7ea302a126a4b",
 | 
			
		||||
          "download_path": "bin/reference_build/chrome-linux64.zip",
 | 
			
		||||
          "path_within_archive": "chrome-linux64/chrome",
 | 
			
		||||
          "version_in_cs": "75.0.3770.80"
 | 
			
		||||
        },
 | 
			
		||||
        "mac_x86_64": {
 | 
			
		||||
          "cloud_storage_hash": "16a43a1e794bb99ec1ebcd40569084985b3c6626",
 | 
			
		||||
          "download_path": "bin/reference_builds/chrome-mac64.zip",
 | 
			
		||||
          "path_within_archive": "chrome-mac/Google Chrome.app/Contents/MacOS/Google Chrome",
 | 
			
		||||
          "version_in_cs": "75.0.3770.80"
 | 
			
		||||
        },
 | 
			
		||||
        "win_AMD64": {
 | 
			
		||||
          "cloud_storage_hash": "1ec52bd4164f2d93c53113a093dae9e041eb2d73",
 | 
			
		||||
          "download_path": "bin\\reference_build\\chrome-win64-clang.zip",
 | 
			
		||||
          "path_within_archive": "chrome-win64-clang\\chrome.exe",
 | 
			
		||||
          "version_in_cs": "75.0.3770.80"
 | 
			
		||||
        },
 | 
			
		||||
        "win_x86": {
 | 
			
		||||
          "cloud_storage_hash": "0f9eb991ba618dc61f2063ea252f44be94c2252e",
 | 
			
		||||
          "download_path": "bin\\reference_build\\chrome-win-clang.zip",
 | 
			
		||||
          "path_within_archive": "chrome-win-clang\\chrome.exe",
 | 
			
		||||
          "version_in_cs": "75.0.3770.80"
 | 
			
		||||
        }
 | 
			
		||||
      }
 | 
			
		||||
    },
 | 
			
		||||
    "chrome_m72": {
 | 
			
		||||
      "cloud_storage_base_folder": "binary_dependencies",
 | 
			
		||||
      "cloud_storage_bucket": "chrome-telemetry",
 | 
			
		||||
      "file_info": {
 | 
			
		||||
        "linux_x86_64": {
 | 
			
		||||
          "cloud_storage_hash": "537c19346b20340cc6807242e1eb6d82dfcfa2e8",
 | 
			
		||||
          "download_path": "bin/reference_build/chrome-linux64.zip",
 | 
			
		||||
          "path_within_archive": "chrome-linux64/chrome",
 | 
			
		||||
          "version_in_cs": "72.0.3626.119"
 | 
			
		||||
        },
 | 
			
		||||
        "mac_x86_64": {
 | 
			
		||||
          "cloud_storage_hash": "7f6a931f696f57561703538c6f799781d6e22e7e",
 | 
			
		||||
          "download_path": "bin/reference_builds/chrome-mac64.zip",
 | 
			
		||||
          "path_within_archive": "chrome-mac/Google Chrome.app/Contents/MacOS/Google Chrome",
 | 
			
		||||
          "version_in_cs": "72.0.3626.119"
 | 
			
		||||
        },
 | 
			
		||||
        "win_AMD64": {
 | 
			
		||||
          "cloud_storage_hash": "563d7985c85bfe77e92b8253d0389ff8551018c7",
 | 
			
		||||
          "download_path": "bin\\reference_build\\chrome-win64-clang.zip",
 | 
			
		||||
          "path_within_archive": "chrome-win64-clang\\chrome.exe",
 | 
			
		||||
          "version_in_cs": "72.0.3626.119"
 | 
			
		||||
        },
 | 
			
		||||
        "win_x86": {
 | 
			
		||||
          "cloud_storage_hash": "1802179da16e44b83bd3f0b296f9e5b0b053d59c",
 | 
			
		||||
          "download_path": "bin\\reference_build\\chrome-win-clang.zip",
 | 
			
		||||
          "path_within_archive": "chrome-win-clang\\chrome.exe",
 | 
			
		||||
          "version_in_cs": "72.0.3626.119"
 | 
			
		||||
        }
 | 
			
		||||
      }
 | 
			
		||||
    }
 | 
			
		||||
  }
 | 
			
		||||
}
 | 
			
		||||
@ -0,0 +1,26 @@
 | 
			
		||||
# Copyright 2017 The Chromium Authors. All rights reserved.
 | 
			
		||||
# Use of this source code is governed by a BSD-style license that can be
 | 
			
		||||
# found in the LICENSE file.
 | 
			
		||||
 | 
			
		||||
import inspect
 | 
			
		||||
 | 
			
		||||
def IsMethodOverridden(parent_cls, child_cls, method_name):
  """Returns True if |method_name| is redefined between parent and child.

  |method_name| must be defined directly on |parent_cls|. A definition on
  |child_cls| itself, or on any class strictly between |child_cls| and
  |parent_cls| in the inheritance graph, counts as an override.

  Raises:
    AssertionError: if either argument is not a class, or |parent_cls| does
        not define |method_name|.
  """
  assert inspect.isclass(parent_cls), '%s should be a class' % parent_cls
  assert inspect.isclass(child_cls), '%s should be a class' % child_cls
  assert parent_cls.__dict__.get(method_name), '%s has no method %s' % (
      parent_cls, method_name)

  # A definition directly on the child is an override.
  if child_cls.__dict__.get(method_name):
    return True

  # The child derives straight from the parent and defines nothing itself,
  # so there is no intermediate class that could hold an override.
  if parent_cls in child_cls.__bases__:
    return False

  # Otherwise walk each non-object base class and ask the same question
  # recursively; any positive branch wins.
  for base in child_cls.__bases__:
    if not base or base is object:
      continue
    if IsMethodOverridden(parent_cls, base, method_name):
      return True
  return False
 | 
			
		||||
@ -0,0 +1,138 @@
 | 
			
		||||
# Copyright 2017 The Chromium Authors. All rights reserved.
 | 
			
		||||
# Use of this source code is governed by a BSD-style license that can be
 | 
			
		||||
# found in the LICENSE file.
 | 
			
		||||
 | 
			
		||||
import unittest
 | 
			
		||||
 | 
			
		||||
from py_utils import class_util
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class ClassUtilTest(unittest.TestCase):
  """Tests for class_util.IsMethodOverridden across class hierarchies."""

  def testClassOverridden(self):
    # Direct child redefines the method.
    class Parent(object):
      def MethodShouldBeOverridden(self):
        pass

    class Child(Parent):
      def MethodShouldBeOverridden(self):
        pass

    self.assertTrue(class_util.IsMethodOverridden(
        Parent, Child, 'MethodShouldBeOverridden'))

  def testGrandchildOverridden(self):
    # An override two levels below the parent is still detected.
    class Parent(object):
      def MethodShouldBeOverridden(self):
        pass

    class Child(Parent):
      pass

    class Grandchild(Child):
      def MethodShouldBeOverridden(self):
        pass

    self.assertTrue(class_util.IsMethodOverridden(
        Parent, Grandchild, 'MethodShouldBeOverridden'))

  def testClassNotOverridden(self):
    # Defining an unrelated method does not count as an override.
    class Parent(object):
      def MethodShouldBeOverridden(self):
        pass

    class Child(Parent):
      def SomeOtherMethod(self):
        pass

    self.assertFalse(class_util.IsMethodOverridden(
        Parent, Child, 'MethodShouldBeOverridden'))

  def testGrandchildNotOverridden(self):
    # The override on the intermediate Child still counts even though the
    # grandchild itself does not redefine the method -- hence assertTrue.
    class Parent(object):
      def MethodShouldBeOverridden(self):
        pass

    class Child(Parent):
      def MethodShouldBeOverridden(self):
        pass

    class Grandchild(Child):
      def SomeOtherMethod(self):
        pass

    self.assertTrue(class_util.IsMethodOverridden(
        Parent, Grandchild, 'MethodShouldBeOverridden'))

  def testClassNotPresentInParent(self):
    # Asking about a method the parent does not define is a usage error.
    class Parent(object):
      def MethodShouldBeOverridden(self):
        pass

    class Child(Parent):
      def MethodShouldBeOverridden(self):
        pass

    self.assertRaises(
        AssertionError, class_util.IsMethodOverridden,
        Parent, Child, 'WrongMethod')

  def testInvalidClass(self):
    # Non-class arguments are rejected for both the parent and child slots.
    class Foo(object):
      def Bar(self):
        pass

    self.assertRaises(
        AssertionError, class_util.IsMethodOverridden, 'invalid', Foo, 'Bar')

    self.assertRaises(
        AssertionError, class_util.IsMethodOverridden, Foo, 'invalid', 'Bar')

  def testMultipleInheritance(self):
    # Multiple-inheritance lattice exercising lookups through several
    # base-class branches.
    class Aaa(object):
      def One(self):
        pass

    class Bbb(object):
      def Two(self):
        pass

    class Ccc(Aaa, Bbb):
      pass

    class Ddd(object):
      def Three(self):
        pass

    class Eee(Ddd):
      def Three(self):
        pass

    class Fff(Ccc, Eee):
      def One(self):
        pass

    class Ggg(object):
      def Four(self):
        pass

    class Hhh(Fff, Ggg):
      def Two(self):
        pass

    class Iii(Hhh):
      pass

    class Jjj(Iii):
      pass

    self.assertFalse(class_util.IsMethodOverridden(Aaa, Ccc, 'One'))
    self.assertTrue(class_util.IsMethodOverridden(Aaa, Fff, 'One'))
    self.assertTrue(class_util.IsMethodOverridden(Aaa, Hhh, 'One'))
    self.assertTrue(class_util.IsMethodOverridden(Aaa, Jjj, 'One'))
    self.assertFalse(class_util.IsMethodOverridden(Bbb, Ccc, 'Two'))
    self.assertTrue(class_util.IsMethodOverridden(Bbb, Hhh, 'Two'))
    self.assertTrue(class_util.IsMethodOverridden(Bbb, Jjj, 'Two'))
    self.assertFalse(class_util.IsMethodOverridden(Eee, Fff, 'Three'))
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@ -0,0 +1,502 @@
 | 
			
		||||
# Copyright 2014 The Chromium Authors. All rights reserved.
 | 
			
		||||
# Use of this source code is governed by a BSD-style license that can be
 | 
			
		||||
# found in the LICENSE file.
 | 
			
		||||
 | 
			
		||||
"""Wrappers for gsutil, for basic interaction with Google Cloud Storage."""
 | 
			
		||||
 | 
			
		||||
import collections
 | 
			
		||||
import contextlib
 | 
			
		||||
import hashlib
 | 
			
		||||
import logging
 | 
			
		||||
import os
 | 
			
		||||
import re
 | 
			
		||||
import shutil
 | 
			
		||||
import stat
 | 
			
		||||
import subprocess
 | 
			
		||||
import sys
 | 
			
		||||
import tempfile
 | 
			
		||||
import time
 | 
			
		||||
 | 
			
		||||
import py_utils
 | 
			
		||||
from py_utils import cloud_storage_global_lock  # pylint: disable=unused-import
 | 
			
		||||
from py_utils import lock
 | 
			
		||||
 | 
			
		||||
# Do a no-op import here so that cloud_storage_global_lock dep is picked up
 | 
			
		||||
# by https://cs.chromium.org/chromium/src/build/android/test_runner.pydeps.
 | 
			
		||||
# TODO(nedn, jbudorick): figure out a way to get rid of this ugly hack.
 | 
			
		||||
 | 
			
		||||
logger = logging.getLogger(__name__)  # pylint: disable=invalid-name


# Well-known Cloud Storage bucket names used throughout this module.
PUBLIC_BUCKET = 'chromium-telemetry'
PARTNER_BUCKET = 'chrome-partner-telemetry'
INTERNAL_BUCKET = 'chrome-telemetry'
TELEMETRY_OUTPUT = 'chrome-telemetry-output'

# Uses ordered dict to make sure that bucket's key-value items are ordered from
# the most open to the most restrictive.
BUCKET_ALIASES = collections.OrderedDict((
    ('public', PUBLIC_BUCKET),
    ('partner', PARTNER_BUCKET),
    ('internal', INTERNAL_BUCKET),
    ('output', TELEMETRY_OUTPUT),
))

BUCKET_ALIAS_NAMES = list(BUCKET_ALIASES.keys())


# Path to the gsutil executable bundled with catapult.
_GSUTIL_PATH = os.path.join(py_utils.GetCatapultDir(), 'third_party', 'gsutil',
                            'gsutil')

# TODO(tbarzic): A workaround for http://crbug.com/386416 and
#     http://crbug.com/359293. See |_RunCommand|.
_CROS_GSUTIL_HOME_WAR = '/home/chromeos-test/'


# If Environment variables has DISABLE_CLOUD_STORAGE_IO set to '1', any method
# calls that invoke cloud storage network io will throw exceptions.
DISABLE_CLOUD_STORAGE_IO = 'DISABLE_CLOUD_STORAGE_IO'

# The maximum number of seconds to wait to acquire the pseudo lock for a cloud
# storage file before raising an exception.
LOCK_ACQUISITION_TIMEOUT = 10
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class CloudStorageError(Exception):
  """Base exception for all errors raised by this module's gsutil wrappers."""

  @staticmethod
  def _GetConfigInstructions():
    # Spell out the exact command to run; on CrOS devices gsutil needs a
    # writable HOME (see _RunCommand).
    command = _GSUTIL_PATH
    if py_utils.IsRunningOnCrosDevice():
      command = 'HOME=%s %s' % (_CROS_GSUTIL_HOME_WAR, _GSUTIL_PATH)
    return ('To configure your credentials:\n'
            '  1. Run "%s config" and follow its instructions.\n'
            '  2. If you have a @google.com account, use that account.\n'
            '  3. For the project-id, just enter 0.' % command)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class PermissionError(CloudStorageError):
  """Raised when the user lacks permission for the requested GCS object.

  NOTE(review): this shadows the Python 3 builtin PermissionError inside
  this module -- presumably deliberate; callers should reference it as
  cloud_storage.PermissionError. Confirm before renaming.
  """

  def __init__(self):
    super(PermissionError, self).__init__(
        'Attempted to access a file from Cloud Storage but you don\'t '
        'have permission. ' + self._GetConfigInstructions())
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class CredentialsError(CloudStorageError):
  """Raised when no gsutil credentials are configured for protected data."""

  def __init__(self):
    super(CredentialsError, self).__init__(
        'Attempted to access a file from Cloud Storage but you have no '
        'configured credentials. ' + self._GetConfigInstructions())
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class CloudStorageIODisabled(CloudStorageError):
  """Raised when a network command runs while DISABLE_CLOUD_STORAGE_IO='1'."""
  pass
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class NotFoundError(CloudStorageError):
  """Raised when the requested bucket or object does not exist."""
  pass
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class ServerError(CloudStorageError):
  """Raised on a Cloud Storage 500 response; _GetLocked retries once on it."""
  pass
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# TODO(tonyg/dtu): Can this be replaced with distutils.spawn.find_executable()?
 | 
			
		||||
def _FindExecutableInPath(relative_executable_path, *extra_search_paths):
  """Returns the first executable matching |relative_executable_path|.

  |extra_search_paths| are checked before the directories in $PATH.
  Returns None when no candidate is executable.
  """
  candidate_dirs = list(extra_search_paths)
  candidate_dirs.extend(os.environ['PATH'].split(os.pathsep))
  for directory in candidate_dirs:
    candidate = os.path.join(directory, relative_executable_path)
    if py_utils.IsExecutable(candidate):
      return candidate
  return None
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _EnsureExecutable(gsutil):
 | 
			
		||||
  """chmod +x if gsutil is not executable."""
 | 
			
		||||
  st = os.stat(gsutil)
 | 
			
		||||
  if not st.st_mode & stat.S_IEXEC:
 | 
			
		||||
    os.chmod(gsutil, st.st_mode | stat.S_IEXEC)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _IsRunningOnSwarming():
 | 
			
		||||
  return os.environ.get('SWARMING_HEADLESS') is not None
 | 
			
		||||
 | 
			
		||||
def _RunCommand(args):
  """Runs the bundled gsutil with |args| and returns its stdout.

  Args:
    args: list of gsutil arguments, starting with the gsutil command name
        (e.g. ['cp', src_url, dst_url]).

  Returns:
    gsutil's standard output.

  Raises:
    CloudStorageIODisabled: if cloud storage IO is disabled via the
        DISABLE_CLOUD_STORAGE_IO environment variable and the command is not
        one of the local-only commands ('help', 'hash', 'version').
    CloudStorageError: (or a subclass) translated from gsutil's stderr when
        it exits with a nonzero return code.
  """
  # BUGFIX: perform this check while args[0] is still the gsutil command
  # name. The original code checked after prepending the executable path
  # below, at which point args[0] could never equal 'help'/'hash'/'version',
  # so those local-only commands were incorrectly blocked as well.
  if args[0] not in ('help', 'hash', 'version') and not IsNetworkIOEnabled():
    raise CloudStorageIODisabled(
        "Environment variable DISABLE_CLOUD_STORAGE_IO is set to 1. "
        'Command %s is not allowed to run' % args)

  # On cros device, as telemetry is running as root, home will be set to /root/,
  # which is not writable. gsutil will attempt to create a download tracker dir
  # in home dir and fail. To avoid this, override HOME dir to something writable
  # when running on cros device.
  #
  # TODO(tbarzic): Figure out a better way to handle gsutil on cros.
  #     http://crbug.com/386416, http://crbug.com/359293.
  gsutil_env = None
  if py_utils.IsRunningOnCrosDevice():
    gsutil_env = os.environ.copy()
    gsutil_env['HOME'] = _CROS_GSUTIL_HOME_WAR
  elif _IsRunningOnSwarming():
    gsutil_env = os.environ.copy()

  if os.name == 'nt':
    # If Windows, prepend python. Python scripts aren't directly executable.
    args = [sys.executable, _GSUTIL_PATH] + args
  else:
    # Don't do it on POSIX, in case someone is using a shell script to redirect.
    args = [_GSUTIL_PATH] + args
    _EnsureExecutable(_GSUTIL_PATH)

  gsutil = subprocess.Popen(args, stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE, env=gsutil_env)
  stdout, stderr = gsutil.communicate()

  if gsutil.returncode:
    raise GetErrorObjectForCloudStorageStderr(stderr)

  return stdout
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def GetErrorObjectForCloudStorageStderr(stderr):
  """Translates gsutil stderr text into the matching exception object.

  Checks run from most to least specific; the fallback is a generic
  CloudStorageError carrying the raw stderr text.
  """
  credential_prefixes = (
      'You are attempting to access protected data with no configured',
      'Failure: No handler was ready to authenticate.')
  if stderr.startswith(credential_prefixes):
    return CredentialsError()
  if re.match('.*401.*does not have .* access to .*', stderr):
    return CredentialsError()
  forbidden = ('status=403' in stderr or 'status 403' in stderr or
               '403 Forbidden' in stderr)
  if forbidden or re.match('.*403.*does not have .* access to .*', stderr):
    return PermissionError()
  missing = ('No such object' in stderr or 'No URLs matched' in stderr or
             'One or more URLs matched no' in stderr)
  if stderr.startswith('InvalidUriError') or missing:
    return NotFoundError(stderr)
  if '500 Internal Server Error' in stderr:
    return ServerError(stderr)
  return CloudStorageError(stderr)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def IsNetworkIOEnabled():
  """Returns true if cloud storage is enabled."""
  env_value = os.getenv(DISABLE_CLOUD_STORAGE_IO)

  # Warn about unexpected values, but still treat anything other than '1'
  # as "enabled" so behavior stays predictable.
  if env_value and env_value != '1':
    logger.error(
        'Unsupported value of environment variable '
        'DISABLE_CLOUD_STORAGE_IO. Expected None or \'1\' but got %s.',
        env_value)

  return env_value != '1'
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def List(bucket):
  """Returns the names of all objects listed under gs://|bucket|/."""
  prefix = 'gs://%s/' % bucket
  output = _RunCommand(['ls', prefix])
  names = []
  for url in output.splitlines():
    # Strip the bucket prefix so callers get bare object paths.
    names.append(url[len(prefix):])
  return names
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def Exists(bucket, remote_path):
  """Returns True iff gs://|bucket|/|remote_path| names an existing object."""
  try:
    _RunCommand(['ls', 'gs://%s/%s' % (bucket, remote_path)])
  except NotFoundError:
    return False
  return True
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def Move(bucket1, bucket2, remote_path):
  """Moves |remote_path| from |bucket1| to the same path in |bucket2|."""
  source = 'gs://%s/%s' % (bucket1, remote_path)
  destination = 'gs://%s/%s' % (bucket2, remote_path)
  logger.info('Moving %s to %s', source, destination)
  _RunCommand(['mv', source, destination])
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def Copy(bucket_from, bucket_to, remote_path_from, remote_path_to):
  """Copies a file from one Cloud Storage location to another.

  Args:
      bucket_from: The cloud storage bucket where the file is currently located.
      bucket_to: The cloud storage bucket it is being copied to.
      remote_path_from: The file path where the file is located in bucket_from.
      remote_path_to: The file path it is being copied to in bucket_to.

  Causes no local changes and does not modify the source object; any
  existing file at the destination is overwritten.
  """
  source_url = 'gs://%s/%s' % (bucket_from, remote_path_from)
  destination_url = 'gs://%s/%s' % (bucket_to, remote_path_to)
  logger.info('Copying %s to %s', source_url, destination_url)
  _RunCommand(['cp', source_url, destination_url])
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def Delete(bucket, remote_path):
  """Removes the object gs://|bucket|/|remote_path| from cloud storage."""
  target_url = 'gs://%s/%s' % (bucket, remote_path)
  logger.info('Deleting %s', target_url)
  _RunCommand(['rm', target_url])
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def Get(bucket, remote_path, local_path):
  """Downloads gs://|bucket|/|remote_path| to |local_path|.

  Serialized by a per-destination pseudo file lock (see _FileLock) so that
  concurrent processes do not clobber each other's downloads.
  """
  with _FileLock(local_path):
    _GetLocked(bucket, remote_path, local_path)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# Sentinel file whose OS-level lock guards creation, acquisition, release,
# and removal of the per-file pseudo locks (see _FileLock).
_CLOUD_STORAGE_GLOBAL_LOCK = os.path.join(
    os.path.dirname(os.path.abspath(__file__)), 'cloud_storage_global_lock.py')
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
@contextlib.contextmanager
def _FileLock(base_path):
  """Context manager holding an exclusive cross-process pseudo lock.

  The lock file is '<base_path>.pseudo_lock'. Both acquisition and release
  are retried for up to LOCK_ACQUISITION_TIMEOUT seconds via
  py_utils.WaitFor before it raises.
  """
  pseudo_lock_path = '%s.pseudo_lock' % base_path
  _CreateDirectoryIfNecessary(os.path.dirname(pseudo_lock_path))

  # Make sure that we guard the creation, acquisition, release, and removal of
  # the pseudo lock all with the same guard (_CLOUD_STORAGE_GLOBAL_LOCK).
  # Otherwise, we can get nasty interleavings that result in multiple processes
  # thinking they have an exclusive lock, like:
  #
  # (Process 1) Create and acquire the pseudo lock
  # (Process 1) Release the pseudo lock
  # (Process 1) Release the file lock
  # (Process 2) Open and acquire the existing pseudo lock
  # (Process 1) Delete the (existing) pseudo lock
  # (Process 3) Create and acquire a new pseudo lock
  #
  # Using the same guard for creation and removal of the pseudo lock guarantees
  # that all processes are referring to the same lock.
  pseudo_lock_fd = None
  pseudo_lock_fd_return = []
  py_utils.WaitFor(lambda: _AttemptPseudoLockAcquisition(pseudo_lock_path,
                                                         pseudo_lock_fd_return),
                   LOCK_ACQUISITION_TIMEOUT)
  pseudo_lock_fd = pseudo_lock_fd_return[0]

  try:
    yield
  finally:
    # Release is retried too: it can transiently fail while another process
    # holds the global lock.
    py_utils.WaitFor(lambda: _AttemptPseudoLockRelease(pseudo_lock_fd),
                     LOCK_ACQUISITION_TIMEOUT)
 | 
			
		||||
 | 
			
		||||
def _AttemptPseudoLockAcquisition(pseudo_lock_path, pseudo_lock_fd_return):
  """Try to acquire the lock and return a boolean indicating whether the attempt
  was successful. If the attempt was successful, pseudo_lock_fd_return, which
  should be an empty array, will be modified to contain a single entry: the file
  descriptor of the (now acquired) lock file.

  This whole operation is guarded with the global cloud storage lock, which
  prevents race conditions that might otherwise cause multiple processes to
  believe they hold the same pseudo lock (see _FileLock for more details).
  """
  pseudo_lock_fd = None
  try:
    with open(_CLOUD_STORAGE_GLOBAL_LOCK) as global_file:
      with lock.FileLock(global_file, lock.LOCK_EX | lock.LOCK_NB):
        # Attempt to acquire the lock in a non-blocking manner. If we block,
        # then we'll cause deadlock because another process will be unable to
        # acquire the cloud storage global lock in order to release the pseudo
        # lock.
        pseudo_lock_fd = open(pseudo_lock_path, 'w')
        lock.AcquireFileLock(pseudo_lock_fd, lock.LOCK_EX | lock.LOCK_NB)
        pseudo_lock_fd_return.append(pseudo_lock_fd)
        return True
  except (lock.LockException, IOError):
    # We failed to acquire either the global cloud storage lock or the pseudo
    # lock.
    if pseudo_lock_fd:
      # Close the handle opened before the failed lock attempt so it is not
      # leaked; the caller will simply retry.
      pseudo_lock_fd.close()
    return False
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _AttemptPseudoLockRelease(pseudo_lock_fd):
  """Try to release the pseudo lock and return a boolean indicating whether
  the release was successful.

  This whole operation is guarded with the global cloud storage lock, which
  prevents race conditions that might otherwise cause multiple processes to
  believe they hold the same pseudo lock (see _FileLock for more details).
  """
  pseudo_lock_path = pseudo_lock_fd.name
  try:
    with open(_CLOUD_STORAGE_GLOBAL_LOCK) as global_file:
      with lock.FileLock(global_file, lock.LOCK_EX | lock.LOCK_NB):
        lock.ReleaseFileLock(pseudo_lock_fd)
        pseudo_lock_fd.close()
        try:
          os.remove(pseudo_lock_path)
        except OSError:
          # We don't care if the pseudo lock gets removed elsewhere before
          # we have a chance to do so.
          pass
        return True
  except (lock.LockException, IOError):
    # We failed to acquire the global cloud storage lock and are thus unable to
    # release the pseudo lock.
    return False
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _CreateDirectoryIfNecessary(directory):
 | 
			
		||||
  if not os.path.exists(directory):
 | 
			
		||||
    os.makedirs(directory)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _GetLocked(bucket, remote_path, local_path):
  """Downloads gs://|bucket|/|remote_path| to |local_path|.

  The caller must already hold the pseudo file lock for |local_path| (see
  _FileLock). The download is written to a temporary file in the destination
  directory and moved into place only on success, so a partially downloaded
  file is never visible at |local_path|. One retry is attempted on
  ServerError.
  """
  url = 'gs://%s/%s' % (bucket, remote_path)
  logger.info('Downloading %s to %s', url, local_path)
  _CreateDirectoryIfNecessary(os.path.dirname(local_path))
  with tempfile.NamedTemporaryFile(
      dir=os.path.dirname(local_path),
      delete=False) as partial_download_path:
    try:
      # Windows won't download to an open file.
      partial_download_path.close()
      try:
        _RunCommand(['cp', url, partial_download_path.name])
      except ServerError:
        logger.info('Cloud Storage server error, retrying download')
        _RunCommand(['cp', url, partial_download_path.name])
      shutil.move(partial_download_path.name, local_path)
    finally:
      # Remove the temp file if the download or the move failed partway.
      if os.path.exists(partial_download_path.name):
        os.remove(partial_download_path.name)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def Insert(bucket, remote_path, local_path, publicly_readable=False):
  """ Upload file in |local_path| to cloud storage.

  Args:
    bucket: the google cloud storage bucket name.
    remote_path: the remote file path in |bucket|.
    local_path: path of the local file to be uploaded.
    publicly_readable: whether the uploaded file has publicly readable
      permission.

  Returns:
    The url where the file is uploaded to.
  """
  url = 'gs://%s/%s' % (bucket, remote_path)
  # Build the full gsutil argument list up front instead of appending
  # incrementally; '-a public-read' grants world-read ACL on the object.
  if publicly_readable:
    gsutil_args = ['cp', '-a', 'public-read', local_path, url]
    extra_info = ' (publicly readable)'
  else:
    gsutil_args = ['cp', local_path, url]
    extra_info = ''
  logger.info('Uploading %s to %s%s', local_path, url, extra_info)
  _RunCommand(gsutil_args)
  return 'https://console.developers.google.com/m/cloudstorage/b/%s/o/%s' % (
      bucket, remote_path)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def GetIfHashChanged(cs_path, download_path, bucket, file_hash):
  """Downloads |download_path| to |file_path| if |file_path| doesn't exist or
     it's hash doesn't match |file_hash|.

  Returns:
    True if the binary was changed.
  Raises:
    CredentialsError if the user has no configured credentials.
    PermissionError if the user does not have permission to access the bucket.
    NotFoundError if the file is not in the given bucket in cloud_storage.
  """
  with _FileLock(download_path):
    # Nothing to do when a local copy exists and already has the wanted hash.
    local_is_current = (
        os.path.exists(download_path) and
        CalculateHash(download_path) == file_hash)
    if local_is_current:
      return False
    _GetLocked(bucket, cs_path, download_path)
    return True
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def GetIfChanged(file_path, bucket):
  """Gets the file at file_path if it has a hash file that doesn't match or
  if there is no local copy of file_path, but there is a hash file for it.

  Returns:
    True if the binary was changed.
  Raises:
    CredentialsError if the user has no configured credentials.
    PermissionError if the user does not have permission to access the bucket.
    NotFoundError if the file is not in the given bucket in cloud_storage.
  """
  with _FileLock(file_path):
    # Sidecar files: |hash_path| holds the expected SHA-1; |fetch_ts_path|
    # records when we last fetched the binary (see the long note below).
    hash_path = file_path + '.sha1'
    fetch_ts_path = file_path + '.fetchts'
    if not os.path.exists(hash_path):
      logger.warning('Hash file not found: %s', hash_path)
      return False

    expected_hash = ReadHash(hash_path)

    # To save the time required computing binary hash (which is an expensive
    # operation, see crbug.com/793609#c2 for details), any time we fetch a new
    # binary, we save not only that binary but the time of the fetch in
    # |fetch_ts_path|. Anytime the file needs updated (its
    # hash in |hash_path| change), we can just need to compare the timestamp of
    # |hash_path| with the timestamp in |fetch_ts_path| to figure out
    # if the update operation has been done.
    #
    # Notes: for this to work, we make the assumption that only
    # cloud_storage.GetIfChanged modifies the local |file_path| binary.

    # Fast path: if the binary was fetched after the .sha1 file was last
    # modified, it is already up to date — skip hashing entirely.
    if os.path.exists(fetch_ts_path) and os.path.exists(file_path):
      with open(fetch_ts_path) as f:
        data = f.read().strip()
        last_binary_fetch_ts = float(data)

      if last_binary_fetch_ts > os.path.getmtime(hash_path):
        return False

    # Whether the binary stored in local already has hash matched
    # expected_hash or we need to fetch new binary from cloud, update the
    # timestamp in |fetch_ts_path| with current time anyway since it is
    # outdated compared with sha1's last modified time.
    with open(fetch_ts_path, 'w') as f:
      f.write(str(time.time()))

    if os.path.exists(file_path) and CalculateHash(file_path) == expected_hash:
      return False
    # The remote object is content-addressed: its path within |bucket| is the
    # expected hash itself.
    _GetLocked(bucket, expected_hash, file_path)
    if CalculateHash(file_path) != expected_hash:
      # Drop the timestamp so a later call re-verifies instead of trusting
      # this bad fetch via the fast path above.
      os.remove(fetch_ts_path)
      raise RuntimeError(
          'Binary stored in cloud storage does not have hash matching .sha1 '
          'file. Please make sure that the binary file is uploaded using '
          'depot_tools/upload_to_google_storage.py script or through automatic '
          'framework.')
    return True
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def GetFilesInDirectoryIfChanged(directory, bucket):
  """ Scan the directory for .sha1 files, and download them from the given
  bucket in cloud storage if the local and remote hash don't match or
  there is no local copy.
  """
  if not os.path.isdir(directory):
    raise ValueError(
        '%s does not exist. Must provide a valid directory path.' % directory)
  # Don't allow the root directory to be a serving_dir.
  if directory == os.path.abspath(os.sep):
    raise ValueError('Trying to serve root directory from HTTP server.')
  for root, _, names in os.walk(directory):
    for name in names:
      base_path, extension = os.path.splitext(os.path.join(root, name))
      # Each foo.sha1 sidecar drives a conditional fetch of foo itself.
      if extension == '.sha1':
        GetIfChanged(base_path, bucket)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def CalculateHash(file_path):
  """Calculates and returns the hash of the file at file_path."""
  digest = hashlib.sha1()
  with open(file_path, 'rb') as f:
    # Read in 1mb chunks, so it doesn't all have to be loaded into memory.
    for chunk in iter(lambda: f.read(1024 * 1024), b''):
      digest.update(chunk)
  return digest.hexdigest()
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def ReadHash(hash_path):
  """Returns the expected hash recorded in the .sha1 file at |hash_path|."""
  with open(hash_path, 'rb') as hash_file:
    # A SHA-1 digest is 40 hex characters; 1024 bytes is ample.
    content = hash_file.read(1024)
  return content.rstrip()
 | 
			
		||||
@ -0,0 +1,5 @@
 | 
			
		||||
# Copyright 2016 The Chromium Authors. All rights reserved.
 | 
			
		||||
# Use of this source code is governed by a BSD-style license that can be
 | 
			
		||||
# found in the LICENSE file.
 | 
			
		||||
 | 
			
		||||
# This file is used by cloud_storage._FileLock implementation, don't delete it!
 | 
			
		||||
@ -0,0 +1,387 @@
 | 
			
		||||
# Copyright 2014 The Chromium Authors. All rights reserved.
 | 
			
		||||
# Use of this source code is governed by a BSD-style license that can be
 | 
			
		||||
# found in the LICENSE file.
 | 
			
		||||
 | 
			
		||||
import os
 | 
			
		||||
import shutil
 | 
			
		||||
import sys
 | 
			
		||||
import tempfile
 | 
			
		||||
import unittest
 | 
			
		||||
 | 
			
		||||
import mock
 | 
			
		||||
from pyfakefs import fake_filesystem_unittest
 | 
			
		||||
 | 
			
		||||
import py_utils
 | 
			
		||||
from py_utils import cloud_storage
 | 
			
		||||
from py_utils import lock
 | 
			
		||||
 | 
			
		||||
_CLOUD_STORAGE_GLOBAL_LOCK_PATH = os.path.join(
 | 
			
		||||
    os.path.dirname(__file__), 'cloud_storage_global_lock.py')
 | 
			
		||||
 | 
			
		||||
def _FakeReadHash(_):
  """Test stub for cloud_storage.ReadHash; always reports the same hash."""
  return 'hashthis!'
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _FakeCalulateHashMatchesRead(_):
  """Test stub for CalculateHash that agrees with _FakeReadHash."""
  return 'hashthis!'
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def _FakeCalulateHashNewHash(_):
  """Test stub for CalculateHash that disagrees with _FakeReadHash."""
  return 'omgnewhash'
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class BaseFakeFsUnitTest(fake_filesystem_unittest.TestCase):
  """Shared fixture: pyfakefs filesystem plus environment save/restore."""

  def setUp(self):
    # Snapshot the environment; tests mutate DISABLE_CLOUD_STORAGE_IO and
    # other variables, and tearDown restores this copy.
    self.original_environ = os.environ.copy()
    os.environ['DISABLE_CLOUD_STORAGE_IO'] = ''
    self.setUpPyfakefs()
    # Create the gsutil path in the fake filesystem — presumably
    # cloud_storage checks for this binary's existence; verify against
    # cloud_storage's gsutil lookup if it changes.
    self.fs.CreateFile(
        os.path.join(py_utils.GetCatapultDir(),
                     'third_party', 'gsutil', 'gsutil'))

  def CreateFiles(self, file_paths):
    # Creates an empty fake file at each given path.
    for f in file_paths:
      self.fs.CreateFile(f)

  def tearDown(self):
    self.tearDownPyfakefs()
    os.environ = self.original_environ

  def _FakeRunCommand(self, cmd):
    # No-op stand-in for cloud_storage._RunCommand.
    pass

  def _FakeGet(self, bucket, remote_path, local_path):
    # No-op stand-in for cloud_storage._GetLocked.
    pass
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class CloudStorageFakeFsUnitTest(BaseFakeFsUnitTest):
  """Tests for cloud_storage's command plumbing against the fake filesystem."""

  def _AssertRunCommandRaisesError(self, communicate_strs, error):
    # Drives _RunCommand with a mocked subprocess whose stderr is each of the
    # given strings, asserting each maps to the expected exception type.
    with mock.patch('py_utils.cloud_storage.subprocess.Popen') as popen:
      p_mock = mock.Mock()
      popen.return_value = p_mock
      p_mock.returncode = 1
      for stderr in communicate_strs:
        p_mock.communicate.return_value = ('', stderr)
        self.assertRaises(error, cloud_storage._RunCommand, [])

  def testRunCommandCredentialsError(self):
    strs = ['You are attempting to access protected data with no configured',
            'Failure: No handler was ready to authenticate.']
    self._AssertRunCommandRaisesError(strs, cloud_storage.CredentialsError)

  def testRunCommandPermissionError(self):
    strs = ['status=403', 'status 403', '403 Forbidden']
    self._AssertRunCommandRaisesError(strs, cloud_storage.PermissionError)

  def testRunCommandNotFoundError(self):
    strs = ['InvalidUriError', 'No such object', 'No URLs matched',
            'One or more URLs matched no', 'InvalidUriError']
    self._AssertRunCommandRaisesError(strs, cloud_storage.NotFoundError)

  def testRunCommandServerError(self):
    strs = ['500 Internal Server Error']
    self._AssertRunCommandRaisesError(strs, cloud_storage.ServerError)

  def testRunCommandGenericError(self):
    # Unrecognized stderr falls through to the generic error type.
    strs = ['Random string']
    self._AssertRunCommandRaisesError(strs, cloud_storage.CloudStorageError)

  def testInsertCreatesValidCloudUrl(self):
    # Monkeypatch _RunCommand so no real upload happens; restore in finally.
    orig_run_command = cloud_storage._RunCommand
    try:
      cloud_storage._RunCommand = self._FakeRunCommand
      remote_path = 'test-remote-path.html'
      local_path = 'test-local-path.html'
      cloud_url = cloud_storage.Insert(cloud_storage.PUBLIC_BUCKET,
                                       remote_path, local_path)
      self.assertEqual('https://console.developers.google.com/m/cloudstorage'
                       '/b/chromium-telemetry/o/test-remote-path.html',
                       cloud_url)
    finally:
      cloud_storage._RunCommand = orig_run_command

  @mock.patch('py_utils.cloud_storage.subprocess')
  def testExistsReturnsFalse(self, subprocess_mock):
    p_mock = mock.Mock()
    subprocess_mock.Popen.return_value = p_mock
    # gsutil reports a missing object on stderr; Exists should return False.
    p_mock.communicate.return_value = (
        '',
        'CommandException: One or more URLs matched no objects.\n')
    p_mock.returncode_result = 1
    self.assertFalse(cloud_storage.Exists('fake bucket',
                                          'fake remote path'))

  @unittest.skipIf(sys.platform.startswith('win'),
                   'https://github.com/catapult-project/catapult/issues/1861')
  def testGetFilesInDirectoryIfChanged(self):
    self.CreateFiles([
        'real_dir_path/dir1/1file1.sha1',
        'real_dir_path/dir1/1file2.txt',
        'real_dir_path/dir1/1file3.sha1',
        'real_dir_path/dir2/2file.txt',
        'real_dir_path/dir3/3file1.sha1'])

    # Counter standing in for GetIfChanged; one bump per .sha1 file found.
    def IncrementFilesUpdated(*_):
      IncrementFilesUpdated.files_updated += 1
    IncrementFilesUpdated.files_updated = 0
    orig_get_if_changed = cloud_storage.GetIfChanged
    cloud_storage.GetIfChanged = IncrementFilesUpdated
    try:
      # Root directory and nonexistent directory both raise before scanning.
      self.assertRaises(ValueError, cloud_storage.GetFilesInDirectoryIfChanged,
                        os.path.abspath(os.sep), cloud_storage.PUBLIC_BUCKET)
      self.assertEqual(0, IncrementFilesUpdated.files_updated)
      self.assertRaises(ValueError, cloud_storage.GetFilesInDirectoryIfChanged,
                        'fake_dir_path', cloud_storage.PUBLIC_BUCKET)
      self.assertEqual(0, IncrementFilesUpdated.files_updated)
      cloud_storage.GetFilesInDirectoryIfChanged('real_dir_path',
                                                 cloud_storage.PUBLIC_BUCKET)
      # Exactly the three .sha1 files trigger fetches; .txt files do not.
      self.assertEqual(3, IncrementFilesUpdated.files_updated)
    finally:
      cloud_storage.GetIfChanged = orig_get_if_changed

  def testCopy(self):
    orig_run_command = cloud_storage._RunCommand

    # Replacement _RunCommand that asserts the exact gsutil argv.
    def AssertCorrectRunCommandArgs(args):
      self.assertEqual(expected_args, args)
    cloud_storage._RunCommand = AssertCorrectRunCommandArgs
    expected_args = ['cp', 'gs://bucket1/remote_path1',
                     'gs://bucket2/remote_path2']
    try:
      cloud_storage.Copy('bucket1', 'bucket2', 'remote_path1', 'remote_path2')
    finally:
      cloud_storage._RunCommand = orig_run_command

  @mock.patch('py_utils.cloud_storage.subprocess.Popen')
  def testSwarmingUsesExistingEnv(self, mock_popen):
    # Under swarming, the subprocess must inherit the ambient environment.
    os.environ['SWARMING_HEADLESS'] = '1'

    mock_gsutil = mock_popen()
    mock_gsutil.communicate = mock.MagicMock(return_value=('a', 'b'))
    mock_gsutil.returncode = None

    cloud_storage.Copy('bucket1', 'bucket2', 'remote_path1', 'remote_path2')

    # stderr/stdout of -1 is subprocess.PIPE.
    mock_popen.assert_called_with(
        mock.ANY, stderr=-1, env=os.environ, stdout=-1)

  @mock.patch('py_utils.cloud_storage._FileLock')
  def testDisableCloudStorageIo(self, unused_lock_mock):
    # With the env flag set, every I/O entry point must raise.
    os.environ['DISABLE_CLOUD_STORAGE_IO'] = '1'
    dir_path = 'real_dir_path'
    self.fs.CreateDirectory(dir_path)
    file_path = os.path.join(dir_path, 'file1')
    file_path_sha = file_path + '.sha1'

    def CleanTimeStampFile():
      os.remove(file_path + '.fetchts')

    self.CreateFiles([file_path, file_path_sha])
    with open(file_path_sha, 'w') as f:
      f.write('hash1234')
    with self.assertRaises(cloud_storage.CloudStorageIODisabled):
      cloud_storage.Copy('bucket1', 'bucket2', 'remote_path1', 'remote_path2')
    with self.assertRaises(cloud_storage.CloudStorageIODisabled):
      cloud_storage.Get('bucket', 'foo', file_path)
    with self.assertRaises(cloud_storage.CloudStorageIODisabled):
      cloud_storage.GetIfChanged(file_path, 'foo')
    with self.assertRaises(cloud_storage.CloudStorageIODisabled):
      cloud_storage.GetIfHashChanged('bar', file_path, 'bucket', 'hash1234')
    with self.assertRaises(cloud_storage.CloudStorageIODisabled):
      cloud_storage.Insert('bucket', 'foo', file_path)

    CleanTimeStampFile()
    with self.assertRaises(cloud_storage.CloudStorageIODisabled):
      cloud_storage.GetFilesInDirectoryIfChanged(dir_path, 'bucket')
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class GetIfChangedTests(BaseFakeFsUnitTest):
  """Tests for cloud_storage.GetIfChanged's hash/.fetchts caching logic."""

  def setUp(self):
    super(GetIfChangedTests, self).setUp()
    # Tests monkeypatch these module attributes; keep originals to restore.
    self._orig_read_hash = cloud_storage.ReadHash
    self._orig_calculate_hash = cloud_storage.CalculateHash

  def tearDown(self):
    super(GetIfChangedTests, self).tearDown()
    cloud_storage.CalculateHash = self._orig_calculate_hash
    cloud_storage.ReadHash = self._orig_read_hash

  @mock.patch('py_utils.cloud_storage._FileLock')
  @mock.patch('py_utils.cloud_storage._GetLocked')
  def testHashPathDoesNotExists(self, unused_get_locked, unused_lock_mock):
    cloud_storage.ReadHash = _FakeReadHash
    cloud_storage.CalculateHash = _FakeCalulateHashMatchesRead
    file_path = 'test-file-path.wpr'

    cloud_storage._GetLocked = self._FakeGet
    # hash_path doesn't exist.
    self.assertFalse(cloud_storage.GetIfChanged(file_path,
                                                cloud_storage.PUBLIC_BUCKET))

  @mock.patch('py_utils.cloud_storage._FileLock')
  @mock.patch('py_utils.cloud_storage._GetLocked')
  def testHashPathExistsButFilePathDoesNot(
      self, unused_get_locked, unused_lock_mock):
    cloud_storage.ReadHash = _FakeReadHash
    cloud_storage.CalculateHash = _FakeCalulateHashMatchesRead
    file_path = 'test-file-path.wpr'
    hash_path = file_path + '.sha1'

    # hash_path exists, but file_path doesn't.
    self.CreateFiles([hash_path])
    self.assertTrue(cloud_storage.GetIfChanged(file_path,
                                               cloud_storage.PUBLIC_BUCKET))

  @mock.patch('py_utils.cloud_storage._FileLock')
  @mock.patch('py_utils.cloud_storage._GetLocked')
  def testHashPathAndFileHashExistWithSameHash(
      self, unused_get_locked, unused_lock_mock):
    cloud_storage.ReadHash = _FakeReadHash
    cloud_storage.CalculateHash = _FakeCalulateHashMatchesRead
    file_path = 'test-file-path.wpr'

    # hash_path and file_path exist, and have same hash.
    self.CreateFiles([file_path])
    self.assertFalse(cloud_storage.GetIfChanged(file_path,
                                                cloud_storage.PUBLIC_BUCKET))

  @mock.patch('py_utils.cloud_storage._FileLock')
  @mock.patch('py_utils.cloud_storage._GetLocked')
  def testHashPathAndFileHashExistWithDifferentHash(
      self, mock_get_locked, unused_get_locked):
    cloud_storage.ReadHash = _FakeReadHash
    cloud_storage.CalculateHash = _FakeCalulateHashNewHash
    file_path = 'test-file-path.wpr'
    hash_path = file_path + '.sha1'

    # Simulate a fetch: afterwards the local hash matches the expected one.
    def _FakeGetLocked(bucket, expected_hash, file_path):
      del bucket, expected_hash, file_path  # unused
      cloud_storage.CalculateHash = _FakeCalulateHashMatchesRead

    mock_get_locked.side_effect = _FakeGetLocked

    self.CreateFiles([file_path, hash_path])
    # hash_path and file_path exist, and have different hashes.
    self.assertTrue(cloud_storage.GetIfChanged(file_path,
                                               cloud_storage.PUBLIC_BUCKET))

  @mock.patch('py_utils.cloud_storage._FileLock')
  @mock.patch('py_utils.cloud_storage.CalculateHash')
  @mock.patch('py_utils.cloud_storage._GetLocked')
  def testNoHashComputationNeededUponSecondCall(
      self, mock_get_locked, mock_calculate_hash, unused_get_locked):
    mock_calculate_hash.side_effect = _FakeCalulateHashNewHash
    cloud_storage.ReadHash = _FakeReadHash
    file_path = 'test-file-path.wpr'
    hash_path = file_path + '.sha1'

    # Simulate a fetch: afterwards the local hash matches the expected one.
    def _FakeGetLocked(bucket, expected_hash, file_path):
      del bucket, expected_hash, file_path  # unused
      cloud_storage.CalculateHash = _FakeCalulateHashMatchesRead

    mock_get_locked.side_effect = _FakeGetLocked

    self.CreateFiles([file_path, hash_path])
    # hash_path and file_path exist, and have different hashes. This first call
    # will invoke a fetch.
    self.assertTrue(cloud_storage.GetIfChanged(file_path,
                                               cloud_storage.PUBLIC_BUCKET))

    # The fetch left a .fetchts file on machine.
    self.assertTrue(os.path.exists(file_path + '.fetchts'))

    # Subsequent invocations of GetIfChanged should not invoke CalculateHash.
    mock_calculate_hash.assert_not_called()
    self.assertFalse(cloud_storage.GetIfChanged(file_path,
                                                cloud_storage.PUBLIC_BUCKET))
    self.assertFalse(cloud_storage.GetIfChanged(file_path,
                                                cloud_storage.PUBLIC_BUCKET))

  @mock.patch('py_utils.cloud_storage._FileLock')
  @mock.patch('py_utils.cloud_storage.CalculateHash')
  @mock.patch('py_utils.cloud_storage._GetLocked')
  def testRefetchingFileUponHashFileChange(
      self, mock_get_locked, mock_calculate_hash, unused_get_locked):
    mock_calculate_hash.side_effect = _FakeCalulateHashNewHash
    cloud_storage.ReadHash = _FakeReadHash
    file_path = 'test-file-path.wpr'
    hash_path = file_path + '.sha1'

    # Simulate a fetch: afterwards the local hash matches the expected one.
    def _FakeGetLocked(bucket, expected_hash, file_path):
      del bucket, expected_hash, file_path  # unused
      cloud_storage.CalculateHash = _FakeCalulateHashMatchesRead

    mock_get_locked.side_effect = _FakeGetLocked

    self.CreateFiles([file_path, hash_path])
    # hash_path and file_path exist, and have different hashes. This first call
    # will invoke a fetch.
    self.assertTrue(cloud_storage.GetIfChanged(file_path,
                                               cloud_storage.PUBLIC_BUCKET))

    # The fetch left a .fetchts file on machine.
    self.assertTrue(os.path.exists(file_path + '.fetchts'))

    with open(file_path + '.fetchts') as f:
      fetchts = float(f.read())

    # Updating the .sha1 hash_path file with the new hash after .fetchts
    # is created.
    file_obj = self.fs.GetObject(hash_path)
    file_obj.SetMTime(fetchts + 100)

    cloud_storage.ReadHash = lambda _: 'hashNeW'
    def _FakeGetLockedNewHash(bucket, expected_hash, file_path):
      del bucket, expected_hash, file_path  # unused
      cloud_storage.CalculateHash = lambda _: 'hashNeW'

    mock_get_locked.side_effect = _FakeGetLockedNewHash

    # hash_path and file_path exist, and have different hashes. This first call
    # will invoke a fetch.
    self.assertTrue(cloud_storage.GetIfChanged(file_path,
                                               cloud_storage.PUBLIC_BUCKET))
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class CloudStorageRealFsUnitTest(unittest.TestCase):
  """Lock-timeout tests that need a real filesystem (no pyfakefs)."""

  def setUp(self):
    # Snapshot and restore the environment around each test.
    self.original_environ = os.environ.copy()
    os.environ['DISABLE_CLOUD_STORAGE_IO'] = ''

  def tearDown(self):
    os.environ = self.original_environ

  @mock.patch('py_utils.cloud_storage.LOCK_ACQUISITION_TIMEOUT', .005)
  def testGetPseudoLockUnavailableCausesTimeout(self):
    # Hold the per-file pseudo lock ourselves so GetIfChanged cannot acquire
    # it and must time out (timeout patched down to 5ms to keep the test fast).
    with tempfile.NamedTemporaryFile(suffix='.pseudo_lock') as pseudo_lock_fd:
      with lock.FileLock(pseudo_lock_fd, lock.LOCK_EX | lock.LOCK_NB):
        with self.assertRaises(py_utils.TimeoutException):
          file_path = pseudo_lock_fd.name.replace('.pseudo_lock', '')
          cloud_storage.GetIfChanged(file_path, cloud_storage.PUBLIC_BUCKET)

  @mock.patch('py_utils.cloud_storage.LOCK_ACQUISITION_TIMEOUT', .005)
  def testGetGlobalLockUnavailableCausesTimeout(self):
    # Same idea for the global cloud storage lock file.
    with open(_CLOUD_STORAGE_GLOBAL_LOCK_PATH) as global_lock_fd:
      with lock.FileLock(global_lock_fd, lock.LOCK_EX | lock.LOCK_NB):
        tmp_dir = tempfile.mkdtemp()
        try:
          file_path = os.path.join(tmp_dir, 'foo')
          with self.assertRaises(py_utils.TimeoutException):
            cloud_storage.GetIfChanged(file_path, cloud_storage.PUBLIC_BUCKET)
        finally:
          shutil.rmtree(tmp_dir)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class CloudStorageErrorHandlingTest(unittest.TestCase):
  """Checks stderr-to-exception mapping for gsutil error output."""

  def runTest(self):
    # 401 (unauthenticated) maps to CredentialsError.
    self.assertIsInstance(cloud_storage.GetErrorObjectForCloudStorageStderr(
        'ServiceException: 401 Anonymous users does not have '
        'storage.objects.get access to object chrome-partner-telemetry'),
                          cloud_storage.CredentialsError)
    # 403 (authenticated but unauthorized) maps to PermissionError.
    self.assertIsInstance(cloud_storage.GetErrorObjectForCloudStorageStderr(
        '403 Caller does not have storage.objects.list access to bucket '
        'chrome-telemetry'), cloud_storage.PermissionError)
 | 
			
		||||
@ -0,0 +1,33 @@
 | 
			
		||||
# Copyright 2016 The Chromium Authors. All rights reserved.
 | 
			
		||||
# Use of this source code is governed by a BSD-style license that can be
 | 
			
		||||
# found in the LICENSE file.
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class _OptionalContextManager(object):
  """Delegates to a wrapped context manager only when a condition holds.

  When the condition is falsy, entering and exiting are no-ops that
  yield/return None, so the wrapped manager is never touched.
  """

  def __init__(self, manager, condition):
    self._manager = manager
    self._condition = condition

  def __enter__(self):
    if not self._condition:
      return None
    return self._manager.__enter__()

  def __exit__(self, exc_type, exc_val, exc_tb):
    if not self._condition:
      return None
    return self._manager.__exit__(exc_type, exc_val, exc_tb)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def Optional(manager, condition):
  """Wraps the provided context manager and runs it if condition is True.

  Args:
    manager: A context manager to conditionally run.
    condition: If true, runs the given context manager.
  Returns:
    A context manager that conditionally executes the given manager.
  """
  # Thin factory over the private wrapper class above.
  return _OptionalContextManager(manager, condition)
 | 
			
		||||
 | 
			
		||||
@ -0,0 +1,34 @@
 | 
			
		||||
# Copyright 2016 The Chromium Authors. All rights reserved.
 | 
			
		||||
# Use of this source code is governed by a BSD-style license that can be
 | 
			
		||||
# found in the LICENSE file.
 | 
			
		||||
 | 
			
		||||
import unittest
 | 
			
		||||
 | 
			
		||||
from py_utils import contextlib_ext
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class OptionalUnittest(unittest.TestCase):
  """Tests for contextlib_ext.Optional's conditional delegation."""

  class SampleContextMgr(object):
    # Minimal context manager that records whether it was entered/exited.

    def __init__(self):
      self.entered = False
      self.exited = False

    def __enter__(self):
      self.entered = True

    def __exit__(self, exc_type, exc_val, exc_tb):
      self.exited = True

  def testConditionTrue(self):
    # With a true condition the wrapped manager is entered and exited.
    c = self.SampleContextMgr()
    with contextlib_ext.Optional(c, True):
      self.assertTrue(c.entered)
    self.assertTrue(c.exited)

  def testConditionFalse(self):
    # With a false condition the wrapped manager is never touched.
    c = self.SampleContextMgr()
    with contextlib_ext.Optional(c, False):
      self.assertFalse(c.entered)
    self.assertFalse(c.exited)
 | 
			
		||||
Some files were not shown because too many files have changed in this diff Show More
		Loading…
	
		Reference in New Issue
	
	Block a user